This source file includes the following definitions.
- to_talitos_ptr
- copy_talitos_ptr
- from_talitos_ptr_len
- to_talitos_ptr_ext_set
- to_talitos_ptr_ext_or
- __map_single_talitos_ptr
- map_single_talitos_ptr
- map_single_talitos_ptr_nosync
- unmap_single_talitos_ptr
- reset_channel
- reset_device
- init_device
- talitos_submit
- get_request_hdr
- flush_channel
- DEF_TALITOS2_DONE
- report_eu_error
- talitos_error
- DEF_TALITOS2_INTERRUPT
- talitos_rng_data_read
- talitos_rng_init
- talitos_register_rng
- talitos_unregister_rng
- aead_setkey
- aead_des3_setkey
- talitos_sg_unmap
- ipsec_esp_unmap
- ipsec_esp_encrypt_done
- ipsec_esp_decrypt_swauth_done
- ipsec_esp_decrypt_hwauth_done
- sg_to_link_tbl_offset
- talitos_sg_map_ext
- talitos_sg_map
- ipsec_esp
- talitos_edesc_alloc
- aead_edesc_alloc
- aead_encrypt
- aead_decrypt
- ablkcipher_setkey
- ablkcipher_des_setkey
- ablkcipher_des3_setkey
- ablkcipher_aes_setkey
- common_nonsnoop_unmap
- ablkcipher_done
- common_nonsnoop
- ablkcipher_edesc_alloc
- ablkcipher_encrypt
- ablkcipher_decrypt
- common_nonsnoop_hash_unmap
- ahash_done
- talitos_handle_buggy_hash
- common_nonsnoop_hash
- ahash_edesc_alloc
- ahash_init
- ahash_init_sha224_swinit
- ahash_process_req
- ahash_update
- ahash_final
- ahash_finup
- ahash_digest
- ahash_export
- ahash_import
- keyhash
- ahash_setkey
- talitos_init_common
- talitos_cra_init
- talitos_cra_init_aead
- talitos_cra_init_ahash
- talitos_cra_exit
- hw_supports
- talitos_remove
- talitos_alg_alloc
- talitos_probe_irq
- talitos_probe
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15 #include <linux/kernel.h>
16 #include <linux/module.h>
17 #include <linux/mod_devicetable.h>
18 #include <linux/device.h>
19 #include <linux/interrupt.h>
20 #include <linux/crypto.h>
21 #include <linux/hw_random.h>
22 #include <linux/of_address.h>
23 #include <linux/of_irq.h>
24 #include <linux/of_platform.h>
25 #include <linux/dma-mapping.h>
26 #include <linux/io.h>
27 #include <linux/spinlock.h>
28 #include <linux/rtnetlink.h>
29 #include <linux/slab.h>
30
31 #include <crypto/algapi.h>
32 #include <crypto/aes.h>
33 #include <crypto/internal/des.h>
34 #include <crypto/sha.h>
35 #include <crypto/md5.h>
36 #include <crypto/internal/aead.h>
37 #include <crypto/authenc.h>
38 #include <crypto/skcipher.h>
39 #include <crypto/hash.h>
40 #include <crypto/internal/hash.h>
41 #include <crypto/scatterwalk.h>
42
43 #include "talitos.h"
44
45 static void to_talitos_ptr(struct talitos_ptr *ptr, dma_addr_t dma_addr,
46 unsigned int len, bool is_sec1)
47 {
48 ptr->ptr = cpu_to_be32(lower_32_bits(dma_addr));
49 if (is_sec1) {
50 ptr->len1 = cpu_to_be16(len);
51 } else {
52 ptr->len = cpu_to_be16(len);
53 ptr->eptr = upper_32_bits(dma_addr);
54 }
55 }
56
57 static void copy_talitos_ptr(struct talitos_ptr *dst_ptr,
58 struct talitos_ptr *src_ptr, bool is_sec1)
59 {
60 dst_ptr->ptr = src_ptr->ptr;
61 if (is_sec1) {
62 dst_ptr->len1 = src_ptr->len1;
63 } else {
64 dst_ptr->len = src_ptr->len;
65 dst_ptr->eptr = src_ptr->eptr;
66 }
67 }
68
69 static unsigned short from_talitos_ptr_len(struct talitos_ptr *ptr,
70 bool is_sec1)
71 {
72 if (is_sec1)
73 return be16_to_cpu(ptr->len1);
74 else
75 return be16_to_cpu(ptr->len);
76 }
77
78 static void to_talitos_ptr_ext_set(struct talitos_ptr *ptr, u8 val,
79 bool is_sec1)
80 {
81 if (!is_sec1)
82 ptr->j_extent = val;
83 }
84
85 static void to_talitos_ptr_ext_or(struct talitos_ptr *ptr, u8 val, bool is_sec1)
86 {
87 if (!is_sec1)
88 ptr->j_extent |= val;
89 }
90
91
92
93
94 static void __map_single_talitos_ptr(struct device *dev,
95 struct talitos_ptr *ptr,
96 unsigned int len, void *data,
97 enum dma_data_direction dir,
98 unsigned long attrs)
99 {
100 dma_addr_t dma_addr = dma_map_single_attrs(dev, data, len, dir, attrs);
101 struct talitos_private *priv = dev_get_drvdata(dev);
102 bool is_sec1 = has_ftr_sec1(priv);
103
104 to_talitos_ptr(ptr, dma_addr, len, is_sec1);
105 }
106
/* Map a single buffer into a hardware pointer with default DMA attributes. */
static void map_single_talitos_ptr(struct device *dev,
				   struct talitos_ptr *ptr,
				   unsigned int len, void *data,
				   enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir, 0);
}
114
/*
 * As map_single_talitos_ptr() but skips the CPU cache sync at map time;
 * the caller is responsible for syncing before the device reads the data.
 */
static void map_single_talitos_ptr_nosync(struct device *dev,
					  struct talitos_ptr *ptr,
					  unsigned int len, void *data,
					  enum dma_data_direction dir)
{
	__map_single_talitos_ptr(dev, ptr, len, data, dir,
				 DMA_ATTR_SKIP_CPU_SYNC);
}
123
124
125
126
/*
 * Undo map_single_talitos_ptr(): the bus address and length are recovered
 * from the hardware pointer itself, so no separate bookkeeping is needed.
 */
static void unmap_single_talitos_ptr(struct device *dev,
				     struct talitos_ptr *ptr,
				     enum dma_data_direction dir)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	dma_unmap_single(dev, be32_to_cpu(ptr->ptr),
			 from_talitos_ptr_len(ptr, is_sec1), dir);
}
137
/*
 * Reset one SEC channel and re-enable its standard operating mode bits.
 * The reset bit lives in different registers on SEC1 vs SEC2+; in both
 * cases we set it and poll until the hardware clears it again.
 * Returns 0 on success, -EIO if the reset bit never self-clears.
 */
static int reset_channel(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);

	if (is_sec1) {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS1_CCCR_LO_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR_LO) &
			TALITOS1_CCCR_LO_RESET) && --timeout)
			cpu_relax();
	} else {
		setbits32(priv->chan[ch].reg + TALITOS_CCCR,
			  TALITOS2_CCCR_RESET);

		while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
			TALITOS2_CCCR_RESET) && --timeout)
			cpu_relax();
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset channel %d\n", ch);
		return -EIO;
	}

	/* set 36-bit addressing, done writeback enable and done IRQ enable */
	setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, TALITOS_CCCR_LO_EAE |
		  TALITOS_CCCR_LO_CDWE | TALITOS_CCCR_LO_CDIE);
	/* enable chaining descriptors */
	if (is_sec1)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_NE);

	/* and ICCR writeback, if supported by the hardware */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO,
			  TALITOS_CCCR_LO_IWSE);

	return 0;
}
180
/*
 * Issue a software reset of the whole SEC block via MCR:SWR and wait for
 * the bit to self-clear.  Returns 0 on success, -EIO on timeout.
 */
static int reset_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	bool is_sec1 = has_ftr_sec1(priv);
	u32 mcr = is_sec1 ? TALITOS1_MCR_SWR : TALITOS2_MCR_SWR;

	setbits32(priv->reg + TALITOS_MCR, mcr);

	while ((in_be32(priv->reg + TALITOS_MCR) & mcr)
	       && --timeout)
		cpu_relax();

	/*
	 * With a second IRQ line present, split channel-to-IRQ assignment:
	 * route channels 1 and 3 to the secondary interrupt.
	 */
	if (priv->irq[1]) {
		mcr = TALITOS_MCR_RCA1 | TALITOS_MCR_RCA3;
		setbits32(priv->reg + TALITOS_MCR, mcr);
	}

	if (timeout == 0) {
		dev_err(dev, "failed to reset device\n");
		return -EIO;
	}

	return 0;
}
206
207
208
209
/*
 * Bring the SEC device to a known-good state: master reset, per-channel
 * reset, then unmask the interrupts this driver services.
 * Returns 0 on success or the first failing sub-step's error code.
 */
static int init_device(struct device *dev)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int ch, err;
	bool is_sec1 = has_ftr_sec1(priv);

	/*
	 * Master reset is performed twice on purpose — presumably per a SEC
	 * erratum where a single MCR:SWR write does not fully clear certain
	 * interrupts (NOTE(review): confirm against the device errata before
	 * simplifying).
	 */
	err = reset_device(dev);
	if (err)
		return err;

	err = reset_device(dev);
	if (err)
		return err;

	/* reset channels */
	for (ch = 0; ch < priv->num_channels; ch++) {
		err = reset_channel(dev, ch);
		if (err)
			return err;
	}

	/*
	 * Enable channel done and error interrupts.  SEC1 interrupt masking
	 * is inverted relative to SEC2+: bits are *cleared* to unmask.
	 */
	if (is_sec1) {
		clrbits32(priv->reg + TALITOS_IMR, TALITOS1_IMR_INIT);
		clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);
		/* disable parity error check in DEU (erroneous? - see note) */
		setbits32(priv->reg_deu + TALITOS_EUICR, TALITOS1_DEUICR_KPE);
	} else {
		setbits32(priv->reg + TALITOS_IMR, TALITOS2_IMR_INIT);
		setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);
	}

	/* disable integrity check error interrupts (use writeback instead) */
	if (priv->features & TALITOS_FTR_HW_AUTH_CHECK)
		setbits32(priv->reg_mdeu + TALITOS_EUICR_LO,
			  TALITOS_MDEUICR_LO_ICE);

	return 0;
}
255
256
257
258
259
260
261
262
263
264
265
266
267
/**
 * talitos_submit - submits a descriptor to the device for processing
 * @dev:	the SEC device to be used
 * @ch:		the SEC device channel to be used
 * @desc:	the descriptor to be processed by the device
 * @callback:	whatever callback function that will be called
 *		when the request completes
 * @context:	a handle for use by caller (optional)
 *
 * desc must contain valid dma-mapped (bus physical) address pointers.
 * callback must check err and feedback in descriptor header
 * for device processing status.
 *
 * Returns -EINPROGRESS on successful enqueue, -EAGAIN when the channel
 * fifo is full.
 */
static int talitos_submit(struct device *dev, int ch, struct talitos_desc *desc,
			  void (*callback)(struct device *dev,
					   struct talitos_desc *desc,
					   void *context, int error),
			  void *context)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request;
	unsigned long flags;
	int head;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].head_lock, flags);

	if (!atomic_inc_not_zero(&priv->chan[ch].submit_count)) {
		/* h/w fifo is full */
		spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);
		return -EAGAIN;
	}

	head = priv->chan[ch].head;
	request = &priv->chan[ch].fifo[head];

	/* map descriptor and save caller data */
	if (is_sec1) {
		desc->hdr1 = desc->hdr;
		request->dma_desc = dma_map_single(dev, &desc->hdr1,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	} else {
		request->dma_desc = dma_map_single(dev, desc,
						   TALITOS_DESC_SIZE,
						   DMA_BIDIRECTIONAL);
	}
	request->callback = callback;
	request->context = context;

	/* increment fifo head */
	priv->chan[ch].head = (priv->chan[ch].head + 1) & (priv->fifo_len - 1);

	/* publish the request entry before flush_channel() can see it */
	smp_wmb();
	request->desc = desc;

	/* GO! — order the fifo write after all prior stores */
	wmb();
	out_be32(priv->chan[ch].reg + TALITOS_FF,
		 upper_32_bits(request->dma_desc));
	out_be32(priv->chan[ch].reg + TALITOS_FF_LO,
		 lower_32_bits(request->dma_desc));

	spin_unlock_irqrestore(&priv->chan[ch].head_lock, flags);

	return -EINPROGRESS;
}
322
/*
 * Fetch the (possibly hardware-updated) header of a queued request.
 * On SEC2+ the header is read in place; on SEC1 the device writes back
 * into hdr1, and for chained descriptors the relevant header lives in
 * the second descriptor appended past the edesc link table buffer.
 */
static __be32 get_request_hdr(struct talitos_request *request, bool is_sec1)
{
	struct talitos_edesc *edesc;

	if (!is_sec1)
		return request->desc->hdr;

	if (!request->desc->next_desc)
		return request->desc->hdr1;

	edesc = container_of(request->desc, struct talitos_edesc, desc);

	return ((struct talitos_desc *)(edesc->buf + edesc->dma_len))->hdr1;
}
337
338
339
340
/*
 * Process completed requests for a channel, invoking each callback with
 * the completion status; on @error, also fail pending (not-done) requests
 * unless the channel can resume (@reset_ch clear, single-descriptor case).
 */
static void flush_channel(struct device *dev, int ch, int error, int reset_ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_request *request, saved_req;
	unsigned long flags;
	int tail, status;
	bool is_sec1 = has_ftr_sec1(priv);

	spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);

	tail = priv->chan[ch].tail;
	while (priv->chan[ch].fifo[tail].desc) {
		__be32 hdr;

		request = &priv->chan[ch].fifo[tail];

		/* descriptors with their done bits set don't get the error */
		rmb();
		hdr = get_request_hdr(request, is_sec1);

		if ((hdr & DESC_HDR_DONE) == DESC_HDR_DONE)
			status = 0;
		else
			if (!error)
				break;
			else
				status = error;

		dma_unmap_single(dev, request->dma_desc,
				 TALITOS_DESC_SIZE,
				 DMA_BIDIRECTIONAL);

		/* copy entries so we can call callback outside lock */
		saved_req.desc = request->desc;
		saved_req.callback = request->callback;
		saved_req.context = request->context;

		/* release request entry in fifo */
		smp_wmb();
		request->desc = NULL;

		/* increment fifo tail */
		priv->chan[ch].tail = (tail + 1) & (priv->fifo_len - 1);

		spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);

		atomic_dec(&priv->chan[ch].submit_count);

		saved_req.callback(dev, saved_req.desc, saved_req.context,
				   status);

		/* channel may resume processing in single desc error case */
		if (error && !reset_ch && status == error)
			return;
		spin_lock_irqsave(&priv->chan[ch].tail_lock, flags);
		tail = priv->chan[ch].tail;
	}

	spin_unlock_irqrestore(&priv->chan[ch].tail_lock, flags);
}
400
401
402
403
/*
 * Tasklet body for SEC1 "done" processing: flush every channel covered by
 * ch_done_mask, then re-enable its done interrupts.  SEC1 interrupt
 * masking is inverted — bits are cleared in IMR to unmask.
 */
#define DEF_TALITOS1_DONE(name, ch_done_mask)				\
static void talitos1_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 0x10000000)					\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & 0x40000000)					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & 0x00010000)					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & 0x00040000)					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	clrbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	clrbits32(priv->reg + TALITOS_IMR_LO, TALITOS1_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS1_DONE(4ch, TALITOS1_ISR_4CHDONE)
DEF_TALITOS1_DONE(ch0, TALITOS1_ISR_CH_0_DONE)
430
/*
 * Tasklet body for SEC2+ "done" processing: flush every channel covered
 * by ch_done_mask, then set the IMR bits again to unmask done interrupts.
 */
#define DEF_TALITOS2_DONE(name, ch_done_mask)				\
static void talitos2_done_##name(unsigned long data)			\
{									\
	struct device *dev = (struct device *)data;			\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	unsigned long flags;						\
									\
	if (ch_done_mask & 1)						\
		flush_channel(dev, 0, 0, 0);				\
	if (ch_done_mask & (1 << 2))					\
		flush_channel(dev, 1, 0, 0);				\
	if (ch_done_mask & (1 << 4))					\
		flush_channel(dev, 2, 0, 0);				\
	if (ch_done_mask & (1 << 6))					\
		flush_channel(dev, 3, 0, 0);				\
									\
	/* At this point, all completed channels have been processed */	\
	/* Unmask done interrupts for channels completed later on. */	\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	setbits32(priv->reg + TALITOS_IMR, ch_done_mask);		\
	setbits32(priv->reg + TALITOS_IMR_LO, TALITOS2_IMR_LO_INIT);	\
	spin_unlock_irqrestore(&priv->reg_lock, flags);			\
}

DEF_TALITOS2_DONE(4ch, TALITOS2_ISR_4CHDONE)
DEF_TALITOS2_DONE(ch0, TALITOS2_ISR_CH_0_DONE)
DEF_TALITOS2_DONE(ch0_2, TALITOS2_ISR_CH_0_2_DONE)
DEF_TALITOS2_DONE(ch1_3, TALITOS2_ISR_CH_1_3_DONE)
459
460
461
462
/*
 * Locate the descriptor the channel is currently executing (from the
 * CDPR register) and return its header, or 0 if it cannot be found.
 * Used by error reporting to identify the offending execution units.
 */
static u32 current_desc_hdr(struct device *dev, int ch)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int tail, iter;
	dma_addr_t cur_desc;

	cur_desc = ((u64)in_be32(priv->chan[ch].reg + TALITOS_CDPR)) << 32;
	cur_desc |= in_be32(priv->chan[ch].reg + TALITOS_CDPR_LO);

	if (!cur_desc) {
		dev_err(dev, "CDPR is NULL, giving up search for offending descriptor\n");
		return 0;
	}

	tail = priv->chan[ch].tail;

	/* walk the fifo from tail, matching either the request's own
	 * descriptor or its chained next_desc */
	iter = tail;
	while (priv->chan[ch].fifo[iter].dma_desc != cur_desc &&
	       priv->chan[ch].fifo[iter].desc->next_desc != cur_desc) {
		iter = (iter + 1) & (priv->fifo_len - 1);
		if (iter == tail) {
			dev_err(dev, "couldn't locate current descriptor\n");
			return 0;
		}
	}

	if (priv->chan[ch].fifo[iter].desc->next_desc == cur_desc) {
		struct talitos_edesc *edesc;

		/* chained case: the second descriptor sits past the
		 * edesc link table buffer */
		edesc = container_of(priv->chan[ch].fifo[iter].desc,
				     struct talitos_edesc, desc);
		return ((struct talitos_desc *)
			(edesc->buf + edesc->dma_len))->hdr;
	}

	return priv->chan[ch].fifo[iter].desc->hdr;
}
500
501
502
503
/*
 * Dump the interrupt status registers of the execution units selected by
 * the failing descriptor's header, plus the channel descriptor buffer.
 */
static void report_eu_error(struct device *dev, int ch, u32 desc_hdr)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	int i;

	/* no header found in the fifo — read it from the descriptor buffer */
	if (!desc_hdr)
		desc_hdr = in_be32(priv->chan[ch].reg + TALITOS_DESCBUF);

	switch (desc_hdr & DESC_HDR_SEL0_MASK) {
	case DESC_HDR_SEL0_AFEU:
		dev_err(dev, "AFEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_afeu + TALITOS_EUISR),
			in_be32(priv->reg_afeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_DEU:
		dev_err(dev, "DEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_deu + TALITOS_EUISR),
			in_be32(priv->reg_deu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_MDEUA:
	case DESC_HDR_SEL0_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_RNG:
		dev_err(dev, "RNGUISR 0x%08x_%08x\n",
			in_be32(priv->reg_rngu + TALITOS_ISR),
			in_be32(priv->reg_rngu + TALITOS_ISR_LO));
		break;
	case DESC_HDR_SEL0_PKEU:
		dev_err(dev, "PKEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_AESU:
		dev_err(dev, "AESUISR 0x%08x_%08x\n",
			in_be32(priv->reg_aesu + TALITOS_EUISR),
			in_be32(priv->reg_aesu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL0_KEU:
		/* NOTE(review): KEU case reads reg_pkeu — verify this is the
		 * intended register block for the KEU status dump */
		dev_err(dev, "KEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_pkeu + TALITOS_EUISR),
			in_be32(priv->reg_pkeu + TALITOS_EUISR_LO));
		break;
	}

	switch (desc_hdr & DESC_HDR_SEL1_MASK) {
	case DESC_HDR_SEL1_MDEUA:
	case DESC_HDR_SEL1_MDEUB:
		dev_err(dev, "MDEUISR 0x%08x_%08x\n",
			in_be32(priv->reg_mdeu + TALITOS_EUISR),
			in_be32(priv->reg_mdeu + TALITOS_EUISR_LO));
		break;
	case DESC_HDR_SEL1_CRCU:
		dev_err(dev, "CRCUISR 0x%08x_%08x\n",
			in_be32(priv->reg_crcu + TALITOS_EUISR),
			in_be32(priv->reg_crcu + TALITOS_EUISR_LO));
		break;
	}

	for (i = 0; i < 8; i++)
		dev_err(dev, "DESCBUF 0x%08x_%08x\n",
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF + 8*i),
			in_be32(priv->chan[ch].reg + TALITOS_DESCBUF_LO + 8*i));
}
575
576
577
578
/*
 * Recover from the error interrupt: decode per-channel error status,
 * flush/reset affected channels, and fall back to a full device
 * reinitialization when the error is global or a restart fails.
 */
static void talitos_error(struct device *dev, u32 isr, u32 isr_lo)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;
	int ch, error, reset_dev = 0;
	u32 v_lo;
	bool is_sec1 = has_ftr_sec1(priv);
	/* SEC1 channels are always reset; SEC2+ may be restarted instead */
	int reset_ch = is_sec1 ? 1 : 0;

	for (ch = 0; ch < priv->num_channels; ch++) {
		/* skip channels without errors */
		if (is_sec1) {
			/* bits 29, 31, 17, 19 for channels 0..3 */
			if (!(isr & (1 << (29 + (ch & 1) * 2 - (ch & 2) * 6))))
				continue;
		} else {
			if (!(isr & (1 << (ch * 2 + 1))))
				continue;
		}

		error = -EINVAL;

		v_lo = in_be32(priv->chan[ch].reg + TALITOS_CCPSR_LO);

		if (v_lo & TALITOS_CCPSR_LO_DOF) {
			dev_err(dev, "double fetch fifo overflow error\n");
			error = -EAGAIN;
			reset_ch = 1;
		}
		if (v_lo & TALITOS_CCPSR_LO_SOF) {
			/* h/w dropped descriptor */
			dev_err(dev, "single fetch fifo overflow error\n");
			error = -EAGAIN;
		}
		if (v_lo & TALITOS_CCPSR_LO_MDTE)
			dev_err(dev, "master data transfer error\n");
		if (v_lo & TALITOS_CCPSR_LO_SGDLZ)
			dev_err(dev, is_sec1 ? "pointer not complete error\n"
					     : "s/g data length zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_FPZ)
			dev_err(dev, is_sec1 ? "parity error\n"
					     : "fetch pointer zero error\n");
		if (v_lo & TALITOS_CCPSR_LO_IDH)
			dev_err(dev, "illegal descriptor header error\n");
		if (v_lo & TALITOS_CCPSR_LO_IEU)
			dev_err(dev, is_sec1 ? "static assignment error\n"
					     : "invalid exec unit error\n");
		if (v_lo & TALITOS_CCPSR_LO_EU)
			report_eu_error(dev, ch, current_desc_hdr(dev, ch));
		if (!is_sec1) {
			if (v_lo & TALITOS_CCPSR_LO_GB)
				dev_err(dev, "gather boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_GRL)
				dev_err(dev, "gather return/length error\n");
			if (v_lo & TALITOS_CCPSR_LO_SB)
				dev_err(dev, "scatter boundary error\n");
			if (v_lo & TALITOS_CCPSR_LO_SRL)
				dev_err(dev, "scatter return/length error\n");
		}

		flush_channel(dev, ch, error, reset_ch);

		if (reset_ch) {
			reset_channel(dev, ch);
		} else {
			/* restart the channel and wait for CONT to clear */
			setbits32(priv->chan[ch].reg + TALITOS_CCCR,
				  TALITOS2_CCCR_CONT);
			setbits32(priv->chan[ch].reg + TALITOS_CCCR_LO, 0);
			while ((in_be32(priv->chan[ch].reg + TALITOS_CCCR) &
				TALITOS2_CCCR_CONT) && --timeout)
				cpu_relax();
			if (timeout == 0) {
				dev_err(dev, "failed to restart channel %d\n",
					ch);
				reset_dev = 1;
			}
		}
	}
	if (reset_dev || (is_sec1 && isr & ~TALITOS1_ISR_4CHERR) ||
	    (!is_sec1 && isr & ~TALITOS2_ISR_4CHERR) || isr_lo) {
		if (is_sec1 && (isr_lo & TALITOS1_ISR_TEA_ERR))
			dev_err(dev, "TEA error: ISR 0x%08x_%08x\n",
				isr, isr_lo);
		else
			dev_err(dev, "done overflow, internal time out, or "
				"rngu error: ISR 0x%08x_%08x\n", isr, isr_lo);

		/* purge request queues */
		for (ch = 0; ch < priv->num_channels; ch++)
			flush_channel(dev, ch, -EIO, 1);

		/* reset and reinitialize the device */
		init_device(dev);
	}
}
674
/*
 * SEC1 interrupt handler: ack done/error bits, dispatch errors inline,
 * and defer done processing to a tasklet after *masking* done interrupts
 * (SEC1 IMR semantics are inverted — setting bits masks them).
 */
#define DEF_TALITOS1_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos1_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo & TALITOS1_IMR_LO_INIT)) { \
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			setbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS1_INTERRUPT(4ch, TALITOS1_ISR_4CHDONE, TALITOS1_ISR_4CHERR, 0)
709
/*
 * SEC2+ interrupt handler: ack done/error bits, dispatch errors inline,
 * and defer done processing to a tasklet after masking done interrupts
 * (clearing IMR bits masks them on SEC2+).
 */
#define DEF_TALITOS2_INTERRUPT(name, ch_done_mask, ch_err_mask, tlet)	\
static irqreturn_t talitos2_interrupt_##name(int irq, void *data)	\
{									\
	struct device *dev = data;					\
	struct talitos_private *priv = dev_get_drvdata(dev);		\
	u32 isr, isr_lo;						\
	unsigned long flags;						\
									\
	spin_lock_irqsave(&priv->reg_lock, flags);			\
	isr = in_be32(priv->reg + TALITOS_ISR);				\
	isr_lo = in_be32(priv->reg + TALITOS_ISR_LO);			\
	/* Acknowledge interrupt */					\
	out_be32(priv->reg + TALITOS_ICR, isr & (ch_done_mask | ch_err_mask)); \
	out_be32(priv->reg + TALITOS_ICR_LO, isr_lo);			\
									\
	if (unlikely(isr & ch_err_mask || isr_lo)) {			\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
		talitos_error(dev, isr & ch_err_mask, isr_lo);		\
	}								\
	else {								\
		if (likely(isr & ch_done_mask)) {			\
			/* mask further done interrupts. */		\
			clrbits32(priv->reg + TALITOS_IMR, ch_done_mask); \
			/* done_task will unmask done interrupts at exit */ \
			tasklet_schedule(&priv->done_task[tlet]);	\
		}							\
		spin_unlock_irqrestore(&priv->reg_lock, flags);		\
	}								\
									\
	return (isr & (ch_done_mask | ch_err_mask) || isr_lo) ? IRQ_HANDLED : \
								IRQ_NONE;	\
}

DEF_TALITOS2_INTERRUPT(4ch, TALITOS2_ISR_4CHDONE, TALITOS2_ISR_4CHERR, 0)
DEF_TALITOS2_INTERRUPT(ch0_2, TALITOS2_ISR_CH_0_2_DONE, TALITOS2_ISR_CH_0_2_ERR,
		       0)
DEF_TALITOS2_INTERRUPT(ch1_3, TALITOS2_ISR_CH_1_3_DONE, TALITOS2_ISR_CH_1_3_ERR,
		       1)
748
749
750
751
752 static int talitos_rng_data_present(struct hwrng *rng, int wait)
753 {
754 struct device *dev = (struct device *)rng->priv;
755 struct talitos_private *priv = dev_get_drvdata(dev);
756 u32 ofl;
757 int i;
758
759 for (i = 0; i < 20; i++) {
760 ofl = in_be32(priv->reg_rngu + TALITOS_EUSR_LO) &
761 TALITOS_RNGUSR_LO_OFL;
762 if (ofl || !wait)
763 break;
764 udelay(10);
765 }
766
767 return !!ofl;
768 }
769
/*
 * hwrng data_read callback: returns one 32-bit word of entropy.
 * Both fifo halves are read back-to-back — the rng fifo requires 64-bit
 * accesses — and only the low word is kept.
 */
static int talitos_rng_data_read(struct hwrng *rng, u32 *data)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);

	/* rng fifo requires 64-bit accesses */
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO);
	*data = in_be32(priv->reg_rngu + TALITOS_EU_FIFO_LO);

	return sizeof(u32);
}
781
/*
 * hwrng init callback: reset the RNGU, wait for the reset-done flag,
 * then kick off continuous generation.  Returns 0 or -ENODEV on timeout.
 */
static int talitos_rng_init(struct hwrng *rng)
{
	struct device *dev = (struct device *)rng->priv;
	struct talitos_private *priv = dev_get_drvdata(dev);
	unsigned int timeout = TALITOS_TIMEOUT;

	setbits32(priv->reg_rngu + TALITOS_EURCR_LO, TALITOS_RNGURCR_LO_SR);
	while (!(in_be32(priv->reg_rngu + TALITOS_EUSR_LO)
		 & TALITOS_RNGUSR_LO_RD)
	       && --timeout)
		cpu_relax();
	if (timeout == 0) {
		dev_err(dev, "failed to reset rng hw\n");
		return -ENODEV;
	}

	/* start generating */
	setbits32(priv->reg_rngu + TALITOS_EUDSR_LO, 0);

	return 0;
}
803
804 static int talitos_register_rng(struct device *dev)
805 {
806 struct talitos_private *priv = dev_get_drvdata(dev);
807 int err;
808
809 priv->rng.name = dev_driver_string(dev),
810 priv->rng.init = talitos_rng_init,
811 priv->rng.data_present = talitos_rng_data_present,
812 priv->rng.data_read = talitos_rng_data_read,
813 priv->rng.priv = (unsigned long)dev;
814
815 err = hwrng_register(&priv->rng);
816 if (!err)
817 priv->rng_registered = true;
818
819 return err;
820 }
821
822 static void talitos_unregister_rng(struct device *dev)
823 {
824 struct talitos_private *priv = dev_get_drvdata(dev);
825
826 if (!priv->rng_registered)
827 return;
828
829 hwrng_unregister(&priv->rng);
830 priv->rng_registered = false;
831 }
832
833
834
835
/* crypto alg registration priority */
#define TALITOS_CRA_PRIORITY 3000

/*
 * slightly lower priority for AEAD done with HMAC_SNOOP_NO_AFEU
 * descriptors
 */
#define TALITOS_CRA_PRIORITY_AEAD_HSNA (TALITOS_CRA_PRIORITY - 1)
/* SEC2 supports up to SHA-512; SEC1 builds cap at SHA-256 block size */
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA512_BLOCK_SIZE)
#else
#define TALITOS_MAX_KEY_SIZE (AES_MAX_KEY_SIZE + SHA256_BLOCK_SIZE)
#endif
#define TALITOS_MAX_IV_LENGTH 16
848
/* per-transform (tfm) context */
struct talitos_ctx {
	struct device *dev;		/* SEC device servicing this tfm */
	int ch;				/* channel assigned to this tfm */
	__be32 desc_hdr_template;	/* descriptor header for this alg */
	u8 key[TALITOS_MAX_KEY_SIZE];	/* auth key || enc key for aead */
	u8 iv[TALITOS_MAX_IV_LENGTH];
	dma_addr_t dma_key;		/* DMA mapping of @key */
	unsigned int keylen;		/* total bytes mapped at @dma_key */
	unsigned int enckeylen;
	unsigned int authkeylen;
};
860
#define HASH_MAX_BLOCK_SIZE SHA512_BLOCK_SIZE
#define TALITOS_MDEU_MAX_CONTEXT_SIZE TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512

/* per-request hash state */
struct talitos_ahash_req_ctx {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	unsigned int hw_context_size;
	u8 buf[2][HASH_MAX_BLOCK_SIZE];	/* double-buffered partial block */
	int buf_idx;			/* which of the two buffers is active */
	unsigned int swinit;		/* context initialized in software */
	unsigned int first;		/* first block of the hash */
	unsigned int last;		/* final block of the hash */
	unsigned int to_hash_later;	/* bytes deferred to the next update */
	unsigned int nbuf;		/* bytes currently buffered */
	struct scatterlist bufsl[2];
	struct scatterlist *psrc;
};
877
/* serialized hash state for ahash export/import */
struct talitos_export_state {
	u32 hw_context[TALITOS_MDEU_MAX_CONTEXT_SIZE / sizeof(u32)];
	u8 buf[HASH_MAX_BLOCK_SIZE];	/* buffered partial block */
	unsigned int swinit;
	unsigned int first;
	unsigned int last;
	unsigned int to_hash_later;
	unsigned int nbuf;
};
887
/*
 * AEAD setkey: split the authenc-encoded key blob into auth and enc
 * parts, store them contiguously in ctx->key and (re)map it for DMA.
 * Returns 0 or -EINVAL on a malformed/oversized key.
 */
static int aead_setkey(struct crypto_aead *authenc,
		       const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;

	if (crypto_authenc_extractkeys(&keys, key, keylen) != 0)
		goto badkey;

	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	/* drop the previous DMA mapping before overwriting the key */
	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

	/* don't leave key material on the stack */
	memzero_explicit(&keys, sizeof(keys));
	return 0;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	memzero_explicit(&keys, sizeof(keys));
	return -EINVAL;
}
921
/*
 * As aead_setkey() but additionally validates the encryption key as a
 * 3DES key (weak-key / length checks) before accepting it.
 */
static int aead_des3_setkey(struct crypto_aead *authenc,
			    const u8 *key, unsigned int keylen)
{
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct device *dev = ctx->dev;
	struct crypto_authenc_keys keys;
	int err;

	err = crypto_authenc_extractkeys(&keys, key, keylen);
	if (unlikely(err))
		goto badkey;

	err = -EINVAL;
	if (keys.authkeylen + keys.enckeylen > TALITOS_MAX_KEY_SIZE)
		goto badkey;

	err = verify_aead_des3_key(authenc, keys.enckey, keys.enckeylen);
	if (err)
		goto out;

	/* drop the previous DMA mapping before overwriting the key */
	if (ctx->keylen)
		dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);

	memcpy(ctx->key, keys.authkey, keys.authkeylen);
	memcpy(&ctx->key[keys.authkeylen], keys.enckey, keys.enckeylen);

	ctx->keylen = keys.authkeylen + keys.enckeylen;
	ctx->enckeylen = keys.enckeylen;
	ctx->authkeylen = keys.authkeylen;
	ctx->dma_key = dma_map_single(dev, ctx->key, ctx->keylen,
				      DMA_TO_DEVICE);

out:
	/* don't leave key material on the stack */
	memzero_explicit(&keys, sizeof(keys));
	return err;

badkey:
	crypto_aead_set_flags(authenc, CRYPTO_TFM_RES_BAD_KEY_LEN);
	goto out;
}
962
/*
 * Unmap the src/dst scatterlists of a request.  On SEC1, multi-entry
 * destinations were bounced through the edesc buffer, so the results are
 * synced and copied back to the real destination first.
 */
static void talitos_sg_unmap(struct device *dev,
			     struct talitos_edesc *edesc,
			     struct scatterlist *src,
			     struct scatterlist *dst,
			     unsigned int len, unsigned int offset)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	unsigned int src_nents = edesc->src_nents ? : 1;
	unsigned int dst_nents = edesc->dst_nents ? : 1;

	if (is_sec1 && dst && dst_nents > 1) {
		dma_sync_single_for_device(dev, edesc->dma_link_tbl + offset,
					   len, DMA_FROM_DEVICE);
		sg_pcopy_from_buffer(dst, dst_nents, edesc->buf + offset, len,
				     offset);
	}
	if (src != dst) {
		/* SEC1 multi-entry lists were bounced, not mapped as sg */
		if (src_nents == 1 || !is_sec1)
			dma_unmap_sg(dev, src, src_nents, DMA_TO_DEVICE);

		if (dst && (dst_nents == 1 || !is_sec1))
			dma_unmap_sg(dev, dst, dst_nents, DMA_FROM_DEVICE);
	} else if (src_nents == 1 || !is_sec1) {
		dma_unmap_sg(dev, src, src_nents, DMA_BIDIRECTIONAL);
	}
}
990
/*
 * Undo all DMA mappings set up for an AEAD (ipsec_esp) request.
 * The cipher-IV pointer index depends on the descriptor type, as does
 * whether ptr[6] (the generated ICV) was separately mapped.
 */
static void ipsec_esp_unmap(struct device *dev,
			    struct talitos_edesc *edesc,
			    struct aead_request *areq, bool encrypt)
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	unsigned int authsize = crypto_aead_authsize(aead);
	/* on decrypt, the trailing authsize bytes are not payload */
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	bool is_ipsec_esp = edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP;
	struct talitos_ptr *civ_ptr = &edesc->desc.ptr[is_ipsec_esp ? 2 : 3];

	if (is_ipsec_esp)
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[6],
					 DMA_FROM_DEVICE);
	unmap_single_talitos_ptr(dev, civ_ptr, DMA_TO_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst,
			 cryptlen + authsize, areq->assoclen);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	if (!is_ipsec_esp) {
		unsigned int dst_nents = edesc->dst_nents ? : 1;

		/* save the last ivsize bytes of output into ctx->iv —
		 * presumably to chain the next IV; verify against callers */
		sg_pcopy_to_buffer(areq->dst, dst_nents, ctx->iv, ivsize,
				   areq->assoclen + cryptlen - ivsize);
	}
}
1022
1023
1024
1025
/*
 * Completion callback for AEAD encryption: unmap everything, free the
 * extended descriptor, and complete the crypto API request.
 */
static void ipsec_esp_encrypt_done(struct device *dev,
				   struct talitos_desc *desc, void *context,
				   int err)
{
	struct aead_request *areq = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
	unsigned int ivsize = crypto_aead_ivsize(authenc);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, areq, true);

	dma_unmap_single(dev, edesc->iv_dma, ivsize, DMA_TO_DEVICE);

	kfree(edesc);

	aead_request_complete(areq, err);
}
1045
/*
 * Completion callback for AEAD decryption with software ICV check:
 * compare the ICV computed by hardware (stored past the link table in
 * edesc->buf) against the received ICV in constant time.
 */
static void ipsec_esp_decrypt_swauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_edesc *edesc;
	char *oicv, *icv;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	if (!err) {
		/* auth check: computed ICV sits right after the received one */
		oicv = edesc->buf + edesc->dma_len;
		icv = oicv - authsize;

		err = crypto_memneq(oicv, icv, authsize) ? -EBADMSG : 0;
	}

	kfree(edesc);

	aead_request_complete(req, err);
}
1072
/*
 * Completion callback for AEAD decryption with hardware ICV check:
 * the pass/fail verdict is written back by the device into the
 * descriptor's hdr_lo ICCR1 field.
 */
static void ipsec_esp_decrypt_hwauth_done(struct device *dev,
					  struct talitos_desc *desc,
					  void *context, int err)
{
	struct aead_request *req = context;
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	ipsec_esp_unmap(dev, edesc, req, false);

	/* check ICV auth status */
	if (!err && ((desc->hdr_lo & DESC_HDR_LO_ICCR1_MASK) !=
		     DESC_HDR_LO_ICCR1_PASS))
		err = -EBADMSG;

	kfree(edesc);

	aead_request_complete(req, err);
}
1093
1094
1095
1096
1097
/*
 * Convert a scatterlist (starting at @offset) into SEC h/w link table
 * entries covering @datalen payload bytes plus @elen extent bytes.
 * A scatterlist element straddling the datalen/elen boundary is split
 * into two entries.  Returns the number of entries written; the last
 * one is tagged with the return bit.
 */
static int sg_to_link_tbl_offset(struct scatterlist *sg, int sg_count,
				 unsigned int offset, int datalen, int elen,
				 struct talitos_ptr *link_tbl_ptr)
{
	/* the extent may require one extra (split) entry */
	int n_sg = elen ? sg_count + 1 : sg_count;
	int count = 0;
	int cryptlen = datalen + elen;

	while (cryptlen && sg && n_sg--) {
		unsigned int len = sg_dma_len(sg);

		/* skip whole elements before the starting offset */
		if (offset >= len) {
			offset -= len;
			goto next;
		}

		len -= offset;

		if (len > cryptlen)
			len = cryptlen;

		/* split an element that crosses the datalen boundary */
		if (datalen > 0 && len > datalen) {
			to_talitos_ptr(link_tbl_ptr + count,
				       sg_dma_address(sg) + offset, datalen, 0);
			to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
			count++;
			len -= datalen;
			offset += datalen;
		}
		to_talitos_ptr(link_tbl_ptr + count,
			       sg_dma_address(sg) + offset, len, 0);
		to_talitos_ptr_ext_set(link_tbl_ptr + count, 0, 0);
		count++;
		cryptlen -= len;
		datalen -= len;
		offset = 0;

next:
		sg = sg_next(sg);
	}

	/* tag end of link table */
	if (count > 0)
		to_talitos_ptr_ext_set(link_tbl_ptr + count - 1,
				       DESC_PTR_LNKTBL_RET, 0);

	return count;
}
1146
/*
 * Point a descriptor pointer at @len bytes of @src (+ @elen extent
 * bytes), building a link table at @tbl_off when the data is not
 * contiguous or when @force requests one anyway.  Returns the number
 * of link-table entries used (1 when pointed directly at the data).
 */
static int talitos_sg_map_ext(struct device *dev, struct scatterlist *src,
			      unsigned int len, struct talitos_edesc *edesc,
			      struct talitos_ptr *ptr, int sg_count,
			      unsigned int offset, int tbl_off, int elen,
			      bool force)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	if (!src) {
		/* null source: emit an empty pointer */
		to_talitos_ptr(ptr, 0, 0, is_sec1);
		return 1;
	}
	to_talitos_ptr_ext_set(ptr, elen, is_sec1);
	if (sg_count == 1 && !force) {
		/* single contiguous segment: point at it directly */
		to_talitos_ptr(ptr, sg_dma_address(src) + offset, len, is_sec1);
		return sg_count;
	}
	if (is_sec1) {
		/* SEC1 has no link tables; data was copied to edesc->buf */
		to_talitos_ptr(ptr, edesc->dma_link_tbl + offset, len, is_sec1);
		return sg_count;
	}
	sg_count = sg_to_link_tbl_offset(src, sg_count, offset, len, elen,
					 &edesc->link_tbl[tbl_off]);
	if (sg_count == 1 && !force) {
		/* Only one segment now, so no link tbl needed */
		copy_talitos_ptr(ptr, &edesc->link_tbl[tbl_off], is_sec1);
		return sg_count;
	}
	to_talitos_ptr(ptr, edesc->dma_link_tbl +
			    tbl_off * sizeof(struct talitos_ptr), len, is_sec1);
	to_talitos_ptr_ext_or(ptr, DESC_PTR_LNKTBL_JUMP, is_sec1);

	return sg_count;
}
1182
1183 static int talitos_sg_map(struct device *dev, struct scatterlist *src,
1184 unsigned int len, struct talitos_edesc *edesc,
1185 struct talitos_ptr *ptr, int sg_count,
1186 unsigned int offset, int tbl_off)
1187 {
1188 return talitos_sg_map_ext(dev, src, len, edesc, ptr, sg_count, offset,
1189 tbl_off, 0, false);
1190 }
1191
1192
1193
1194
/*
 * Fill in and submit an IPsec ESP / AEAD descriptor.  Lays out the seven
 * descriptor pointers (key material, assoc data, IV, payload in/out and
 * ICV), building link tables in the edesc as needed, then hands the
 * descriptor to talitos_submit().  On submission failure everything is
 * unmapped and the edesc freed here; otherwise @callback cleans up.
 */
static int ipsec_esp(struct talitos_edesc *edesc, struct aead_request *areq,
		     bool encrypt,
		     void (*callback)(struct device *dev,
				      struct talitos_desc *desc,
				      void *context, int error))
{
	struct crypto_aead *aead = crypto_aead_reqtfm(areq);
	unsigned int authsize = crypto_aead_authsize(aead);
	struct talitos_ctx *ctx = crypto_aead_ctx(aead);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	/* on decrypt the trailing ICV is not part of the payload */
	unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
	unsigned int ivsize = crypto_aead_ivsize(aead);
	int tbl_off = 0;
	int sg_count, ret;
	int elen = 0;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	bool is_ipsec_esp = desc->hdr & DESC_HDR_TYPE_IPSEC_ESP;
	/* IV and cipher-key pointer slots are swapped between desc types */
	struct talitos_ptr *civ_ptr = &desc->ptr[is_ipsec_esp ? 2 : 3];
	struct talitos_ptr *ckey_ptr = &desc->ptr[is_ipsec_esp ? 3 : 2];
	dma_addr_t dma_icv = edesc->dma_link_tbl + edesc->dma_len - authsize;

	/* hmac key */
	to_talitos_ptr(&desc->ptr[0], ctx->dma_key, ctx->authkeylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		/* SEC1 cannot chase link tables: flatten src into edesc->buf */
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  areq->assoclen + cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);

	/* hmac data */
	ret = talitos_sg_map(dev, areq->src, areq->assoclen, edesc,
			     &desc->ptr[1], sg_count, 0, tbl_off);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher iv */
	to_talitos_ptr(civ_ptr, edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(ckey_ptr, ctx->dma_key + ctx->authkeylen,
		       ctx->enckeylen, is_sec1);

	/*
	 * cipher in
	 *
	 * When the hardware verifies the ICV (MDEU_CICV), the ICV bytes
	 * are described as an extent (elen) of the input.
	 */
	if (is_ipsec_esp && (desc->hdr & DESC_HDR_MODE1_MDEU_CICV))
		elen = authsize;

	ret = talitos_sg_map_ext(dev, areq->src, cryptlen, edesc, &desc->ptr[4],
				 sg_count, areq->assoclen, tbl_off, elen,
				 false);

	if (ret > 1) {
		tbl_off += ret;
		sync_needed = true;
	}

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	/* on ESP encrypt the generated ICV is appended as an output extent */
	if (is_ipsec_esp && encrypt)
		elen = authsize;
	else
		elen = 0;
	ret = talitos_sg_map_ext(dev, areq->dst, cryptlen, edesc, &desc->ptr[5],
				 sg_count, areq->assoclen, tbl_off, elen,
				 is_ipsec_esp && !encrypt);
	tbl_off += ret;

	if (!encrypt && is_ipsec_esp) {
		struct talitos_ptr *tbl_ptr = &edesc->link_tbl[tbl_off];

		/* Add an entry to the link table for ICV data */
		to_talitos_ptr_ext_set(tbl_ptr - 1, 0, is_sec1);
		to_talitos_ptr_ext_set(tbl_ptr, DESC_PTR_LNKTBL_RET, is_sec1);

		/* icv data follows link tables */
		to_talitos_ptr(tbl_ptr, dma_icv, authsize, is_sec1);
		to_talitos_ptr_ext_or(&desc->ptr[5], authsize, is_sec1);
		sync_needed = true;
	} else if (!encrypt) {
		to_talitos_ptr(&desc->ptr[6], dma_icv, authsize, is_sec1);
		sync_needed = true;
	} else if (!is_ipsec_esp) {
		talitos_sg_map(dev, areq->dst, authsize, edesc, &desc->ptr[6],
			       sg_count, areq->assoclen + cryptlen, tbl_off);
	}

	/* iv out (ESP only: ptr[6] receives the output IV) */
	if (is_ipsec_esp)
		map_single_talitos_ptr(dev, &desc->ptr[6], ivsize, ctx->iv,
				       DMA_FROM_DEVICE);

	/* flush CPU-written link tables before the device reads them */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len,
					   DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		ipsec_esp_unmap(dev, edesc, areq, encrypt);
		kfree(edesc);
	}
	return ret;
}
1317
1318
1319
1320
/*
 * Allocate and initialise an extended descriptor (edesc) for a request.
 * The single kmalloc holds the edesc header, the link table / bounce
 * buffer (dma_len bytes), an optional stashed-ICV area, an optional
 * second descriptor (SEC1 hash continuation) and a copy of the IV.
 * Returns ERR_PTR() on failure.
 */
static struct talitos_edesc *talitos_edesc_alloc(struct device *dev,
						 struct scatterlist *src,
						 struct scatterlist *dst,
						 u8 *iv,
						 unsigned int assoclen,
						 unsigned int cryptlen,
						 unsigned int authsize,
						 unsigned int ivsize,
						 int icv_stashing,
						 u32 cryptoflags,
						 bool encrypt)
{
	struct talitos_edesc *edesc;
	int src_nents, dst_nents, alloc_len, dma_len, src_len, dst_len;
	dma_addr_t iv_dma = 0;
	gfp_t flags = cryptoflags & CRYPTO_TFM_REQ_MAY_SLEEP ? GFP_KERNEL :
		      GFP_ATOMIC;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int max_len = is_sec1 ? TALITOS1_MAX_DATA_LEN : TALITOS2_MAX_DATA_LEN;

	if (cryptlen + authsize > max_len) {
		dev_err(dev, "length exceeds h/w max limit\n");
		return ERR_PTR(-EINVAL);
	}

	if (!dst || dst == src) {
		/* in-place (or hash): one scatterlist covers everything */
		src_len = assoclen + cryptlen + authsize;
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		/* nents == 0 encodes "single contiguous segment" below */
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_nents = dst ? src_nents : 0;
		dst_len = 0;
	} else {
		/* out-of-place: ICV lives on src for decrypt, dst for encrypt */
		src_len = assoclen + cryptlen + (encrypt ? 0 : authsize);
		src_nents = sg_nents_for_len(src, src_len);
		if (src_nents < 0) {
			dev_err(dev, "Invalid number of src SG.\n");
			return ERR_PTR(-EINVAL);
		}
		src_nents = (src_nents == 1) ? 0 : src_nents;
		dst_len = assoclen + cryptlen + (encrypt ? authsize : 0);
		dst_nents = sg_nents_for_len(dst, dst_len);
		if (dst_nents < 0) {
			dev_err(dev, "Invalid number of dst SG.\n");
			return ERR_PTR(-EINVAL);
		}
		dst_nents = (dst_nents == 1) ? 0 : dst_nents;
	}

	/*
	 * Allocate space for link table(s) / bounce buffer.
	 * SEC1 flattens data into the buffer; SEC2+ needs one talitos_ptr
	 * per segment plus two for the split-entry / return slots, and
	 * authsize bytes to stash the ICV.
	 */
	alloc_len = sizeof(struct talitos_edesc);
	if (src_nents || dst_nents || !encrypt) {
		if (is_sec1)
			dma_len = (src_nents ? src_len : 0) +
				  (dst_nents ? dst_len : 0) + authsize;
		else
			dma_len = (src_nents + dst_nents + 2) *
				  sizeof(struct talitos_ptr) + authsize;
		alloc_len += dma_len;
	} else {
		dma_len = 0;
	}
	alloc_len += icv_stashing ? authsize : 0;

	/* if its a ahash, add space for a second desc next to the first one */
	if (is_sec1 && !dst)
		alloc_len += sizeof(struct talitos_desc);
	alloc_len += ivsize;

	edesc = kmalloc(alloc_len, GFP_DMA | flags);
	if (!edesc)
		return ERR_PTR(-ENOMEM);
	if (ivsize) {
		/* IV is copied to the end of the edesc, then mapped from there */
		iv = memcpy(((u8 *)edesc) + alloc_len - ivsize, iv, ivsize);
		/* NOTE(review): iv_dma is not checked with dma_mapping_error —
		 * confirm whether mapping failure is possible on this platform */
		iv_dma = dma_map_single(dev, iv, ivsize, DMA_TO_DEVICE);
	}
	memset(&edesc->desc, 0, sizeof(edesc->desc));

	edesc->src_nents = src_nents;
	edesc->dst_nents = dst_nents;
	edesc->iv_dma = iv_dma;
	edesc->dma_len = dma_len;
	if (dma_len)
		edesc->dma_link_tbl = dma_map_single(dev, &edesc->link_tbl[0],
						     edesc->dma_len,
						     DMA_BIDIRECTIONAL);

	return edesc;
}
1418
1419 static struct talitos_edesc *aead_edesc_alloc(struct aead_request *areq, u8 *iv,
1420 int icv_stashing, bool encrypt)
1421 {
1422 struct crypto_aead *authenc = crypto_aead_reqtfm(areq);
1423 unsigned int authsize = crypto_aead_authsize(authenc);
1424 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1425 unsigned int ivsize = crypto_aead_ivsize(authenc);
1426 unsigned int cryptlen = areq->cryptlen - (encrypt ? 0 : authsize);
1427
1428 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1429 iv, areq->assoclen, cryptlen,
1430 authsize, ivsize, icv_stashing,
1431 areq->base.flags, encrypt);
1432 }
1433
1434 static int aead_encrypt(struct aead_request *req)
1435 {
1436 struct crypto_aead *authenc = crypto_aead_reqtfm(req);
1437 struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
1438 struct talitos_edesc *edesc;
1439
1440
1441 edesc = aead_edesc_alloc(req, req->iv, 0, true);
1442 if (IS_ERR(edesc))
1443 return PTR_ERR(edesc);
1444
1445
1446 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1447
1448 return ipsec_esp(edesc, req, true, ipsec_esp_encrypt_done);
1449 }
1450
/*
 * AEAD decrypt entry point.  Prefers hardware ICV checking when the
 * device supports it for this descriptor layout; otherwise stashes the
 * received ICV and verifies it in software on completion.
 */
static int aead_decrypt(struct aead_request *req)
{
	struct crypto_aead *authenc = crypto_aead_reqtfm(req);
	unsigned int authsize = crypto_aead_authsize(authenc);
	struct talitos_ctx *ctx = crypto_aead_ctx(authenc);
	struct talitos_private *priv = dev_get_drvdata(ctx->dev);
	struct talitos_edesc *edesc;
	void *icvdata;

	/* allocate extended descriptor (icv_stashing = 1) */
	edesc = aead_edesc_alloc(req, req->iv, 1, false);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	/*
	 * Hardware ICV check only works for ESP descriptors, when the
	 * device advertises it, and when either no link tables are
	 * needed or the device counts the extent into the link-table
	 * length.
	 */
	if ((edesc->desc.hdr & DESC_HDR_TYPE_IPSEC_ESP) &&
	    (priv->features & TALITOS_FTR_HW_AUTH_CHECK) &&
	    ((!edesc->src_nents && !edesc->dst_nents) ||
	     priv->features & TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT)) {

		/* decrypt and check the ICV */
		edesc->desc.hdr = ctx->desc_hdr_template |
				  DESC_HDR_DIR_INBOUND |
				  DESC_HDR_MODE1_MDEU_CICV;

		/* reset integrity check result bits */

		return ipsec_esp(edesc, req, false,
				 ipsec_esp_decrypt_hwauth_done);
	}

	/* Have to check the ICV with software */
	edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;

	/* stash incoming ICV for later cmp with ICV generated by the h/w */
	icvdata = edesc->buf + edesc->dma_len;

	sg_pcopy_to_buffer(req->src, edesc->src_nents ? : 1, icvdata, authsize,
			   req->assoclen + req->cryptlen - authsize);

	return ipsec_esp(edesc, req, false, ipsec_esp_decrypt_swauth_done);
}
1492
1493 static int ablkcipher_setkey(struct crypto_ablkcipher *cipher,
1494 const u8 *key, unsigned int keylen)
1495 {
1496 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1497 struct device *dev = ctx->dev;
1498
1499 if (ctx->keylen)
1500 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
1501
1502 memcpy(&ctx->key, key, keylen);
1503 ctx->keylen = keylen;
1504
1505 ctx->dma_key = dma_map_single(dev, ctx->key, keylen, DMA_TO_DEVICE);
1506
1507 return 0;
1508 }
1509
1510 static int ablkcipher_des_setkey(struct crypto_ablkcipher *cipher,
1511 const u8 *key, unsigned int keylen)
1512 {
1513 return verify_ablkcipher_des_key(cipher, key) ?:
1514 ablkcipher_setkey(cipher, key, keylen);
1515 }
1516
1517 static int ablkcipher_des3_setkey(struct crypto_ablkcipher *cipher,
1518 const u8 *key, unsigned int keylen)
1519 {
1520 return verify_ablkcipher_des3_key(cipher, key) ?:
1521 ablkcipher_setkey(cipher, key, keylen);
1522 }
1523
1524 static int ablkcipher_aes_setkey(struct crypto_ablkcipher *cipher,
1525 const u8 *key, unsigned int keylen)
1526 {
1527 if (keylen == AES_KEYSIZE_128 || keylen == AES_KEYSIZE_192 ||
1528 keylen == AES_KEYSIZE_256)
1529 return ablkcipher_setkey(cipher, key, keylen);
1530
1531 crypto_ablkcipher_set_flags(cipher, CRYPTO_TFM_RES_BAD_KEY_LEN);
1532
1533 return -EINVAL;
1534 }
1535
/*
 * Undo all DMA mappings made by common_nonsnoop() for an ablkcipher
 * request: output IV (ptr[5]), src/dst data, input IV (ptr[1]) and the
 * link table, in that order.
 */
static void common_nonsnoop_unmap(struct device *dev,
				  struct talitos_edesc *edesc,
				  struct ablkcipher_request *areq)
{
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);

	talitos_sg_unmap(dev, edesc, areq->src, areq->dst, areq->nbytes, 0);
	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1], DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);
}
1549
/*
 * Completion callback for ablkcipher requests: unmap DMA resources,
 * copy the output IV (written by the hardware into ctx->iv via ptr[5])
 * back to the request, free the edesc and complete the request.
 */
static void ablkcipher_done(struct device *dev,
			    struct talitos_desc *desc, void *context,
			    int err)
{
	struct ablkcipher_request *areq = context;
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	struct talitos_edesc *edesc;

	edesc = container_of(desc, struct talitos_edesc, desc);

	common_nonsnoop_unmap(dev, edesc, areq);
	/* NOTE(review): ctx->iv is per-tfm, not per-request — concurrent
	 * requests on one tfm could race on it; confirm usage constraints */
	memcpy(areq->info, ctx->iv, ivsize);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}
1569
/*
 * Fill in and submit a plain (non-snooping) cipher descriptor:
 * ptr[1] = IV in, ptr[2] = key, ptr[3] = data in, ptr[4] = data out,
 * ptr[5] = IV out.  On submission failure everything is unmapped and
 * the edesc freed here; otherwise @callback cleans up.
 */
static int common_nonsnoop(struct talitos_edesc *edesc,
			   struct ablkcipher_request *areq,
			   void (*callback) (struct device *dev,
					     struct talitos_desc *desc,
					     void *context, int error))
{
	struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	unsigned int cryptlen = areq->nbytes;
	unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
	int sg_count, ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);

	/* first DWORD empty */

	/* cipher iv */
	to_talitos_ptr(&desc->ptr[1], edesc->iv_dma, ivsize, is_sec1);

	/* cipher key */
	to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen, is_sec1);

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		/* SEC1 cannot chase link tables: flatten src into edesc->buf */
		sg_copy_to_buffer(areq->src, sg_count, edesc->buf,
				  cryptlen);
	else
		sg_count = dma_map_sg(dev, areq->src, sg_count,
				      (areq->src == areq->dst) ?
				       DMA_BIDIRECTIONAL : DMA_TO_DEVICE);
	/*
	 * cipher in
	 */
	sg_count = talitos_sg_map(dev, areq->src, cryptlen, edesc,
				  &desc->ptr[3], sg_count, 0, 0);
	if (sg_count > 1)
		sync_needed = true;

	/* cipher out */
	if (areq->src != areq->dst) {
		sg_count = edesc->dst_nents ? : 1;
		if (!is_sec1 || sg_count == 1)
			dma_map_sg(dev, areq->dst, sg_count, DMA_FROM_DEVICE);
	}

	ret = talitos_sg_map(dev, areq->dst, cryptlen, edesc, &desc->ptr[4],
			     sg_count, 0, (edesc->src_nents + 1));
	if (ret > 1)
		sync_needed = true;

	/* iv out */
	map_single_talitos_ptr(dev, &desc->ptr[5], ivsize, ctx->iv,
			       DMA_FROM_DEVICE);

	/* last DWORD empty */

	/* flush CPU-written link tables before the device reads them */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1640
1641 static struct talitos_edesc *ablkcipher_edesc_alloc(struct ablkcipher_request *
1642 areq, bool encrypt)
1643 {
1644 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1645 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1646 unsigned int ivsize = crypto_ablkcipher_ivsize(cipher);
1647
1648 return talitos_edesc_alloc(ctx->dev, areq->src, areq->dst,
1649 areq->info, 0, areq->nbytes, 0, ivsize, 0,
1650 areq->base.flags, encrypt);
1651 }
1652
1653 static int ablkcipher_encrypt(struct ablkcipher_request *areq)
1654 {
1655 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1656 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1657 struct talitos_edesc *edesc;
1658 unsigned int blocksize =
1659 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1660
1661 if (!areq->nbytes)
1662 return 0;
1663
1664 if (areq->nbytes % blocksize)
1665 return -EINVAL;
1666
1667
1668 edesc = ablkcipher_edesc_alloc(areq, true);
1669 if (IS_ERR(edesc))
1670 return PTR_ERR(edesc);
1671
1672
1673 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_MODE0_ENCRYPT;
1674
1675 return common_nonsnoop(edesc, areq, ablkcipher_done);
1676 }
1677
1678 static int ablkcipher_decrypt(struct ablkcipher_request *areq)
1679 {
1680 struct crypto_ablkcipher *cipher = crypto_ablkcipher_reqtfm(areq);
1681 struct talitos_ctx *ctx = crypto_ablkcipher_ctx(cipher);
1682 struct talitos_edesc *edesc;
1683 unsigned int blocksize =
1684 crypto_tfm_alg_blocksize(crypto_ablkcipher_tfm(cipher));
1685
1686 if (!areq->nbytes)
1687 return 0;
1688
1689 if (areq->nbytes % blocksize)
1690 return -EINVAL;
1691
1692
1693 edesc = ablkcipher_edesc_alloc(areq, false);
1694 if (IS_ERR(edesc))
1695 return PTR_ERR(edesc);
1696
1697 edesc->desc.hdr = ctx->desc_hdr_template | DESC_HDR_DIR_INBOUND;
1698
1699 return common_nonsnoop(edesc, areq, ablkcipher_done);
1700 }
1701
/*
 * Undo all DMA mappings made by common_nonsnoop_hash(), including those
 * belonging to the optional second descriptor (desc2) that SEC1 uses
 * when buffered bytes and new data are hashed in one submission.
 */
static void common_nonsnoop_hash_unmap(struct device *dev,
				       struct talitos_edesc *edesc,
				       struct ahash_request *areq)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	struct talitos_desc *desc = &edesc->desc;
	/* desc2 lives right after the link table / bounce buffer */
	struct talitos_desc *desc2 = (struct talitos_desc *)
				     (edesc->buf + edesc->dma_len);

	unmap_single_talitos_ptr(dev, &edesc->desc.ptr[5], DMA_FROM_DEVICE);
	/* only unmap desc2's out pointer if it isn't shared with desc */
	if (desc->next_desc &&
	    desc->ptr[5].ptr != desc2->ptr[5].ptr)
		unmap_single_talitos_ptr(dev, &desc2->ptr[5], DMA_FROM_DEVICE);

	if (req_ctx->psrc)
		talitos_sg_unmap(dev, edesc, req_ctx->psrc, NULL, 0, 0);

	/* When using hashctx-in, must unmap it. */
	if (from_talitos_ptr_len(&edesc->desc.ptr[1], is_sec1))
		unmap_single_talitos_ptr(dev, &edesc->desc.ptr[1],
					 DMA_TO_DEVICE);
	else if (desc->next_desc)
		unmap_single_talitos_ptr(dev, &desc2->ptr[1],
					 DMA_TO_DEVICE);

	if (is_sec1 && req_ctx->nbuf)
		unmap_single_talitos_ptr(dev, &desc->ptr[3],
					 DMA_TO_DEVICE);

	if (edesc->dma_len)
		dma_unmap_single(dev, edesc->dma_link_tbl, edesc->dma_len,
				 DMA_BIDIRECTIONAL);

	/* unmap the chained second descriptor itself */
	if (edesc->desc.next_desc)
		dma_unmap_single(dev, be32_to_cpu(edesc->desc.next_desc),
				 TALITOS_DESC_SIZE, DMA_BIDIRECTIONAL);
}
1741
/*
 * Completion callback for hash requests: rotate the double buffer when
 * bytes were carried over for the next update, unmap DMA resources,
 * free the edesc and complete the request.
 */
static void ahash_done(struct device *dev,
		       struct talitos_desc *desc, void *context,
		       int err)
{
	struct ahash_request *areq = context;
	struct talitos_edesc *edesc =
		 container_of(desc, struct talitos_edesc, desc);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);

	if (!req_ctx->last && req_ctx->to_hash_later) {
		/* Position any partial block for next update/final/finup */
		req_ctx->buf_idx = (req_ctx->buf_idx + 1) & 1;
		req_ctx->nbuf = req_ctx->to_hash_later;
	}
	common_nonsnoop_hash_unmap(dev, edesc, areq);

	kfree(edesc);

	areq->base.complete(&areq->base, err);
}
1762
1763
1764
1765
1766
1767 static void talitos_handle_buggy_hash(struct talitos_ctx *ctx,
1768 struct talitos_edesc *edesc,
1769 struct talitos_ptr *ptr)
1770 {
1771 static u8 padded_hash[64] = {
1772 0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1773 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1774 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1775 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
1776 };
1777
1778 pr_err_once("Bug in SEC1, padding ourself\n");
1779 edesc->desc.hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
1780 map_single_talitos_ptr(ctx->dev, ptr, sizeof(padded_hash),
1781 (char *)padded_hash, DMA_TO_DEVICE);
1782 }
1783
/*
 * Fill in and submit a hash descriptor: ptr[1] = hash context in,
 * ptr[2] = HMAC key, ptr[3] = data in, ptr[5] = digest or context out.
 * On SEC1, when both previously buffered bytes and new data must be
 * hashed, a second chained descriptor (desc2) is built after the edesc
 * buffer and linked via desc->next_desc.
 */
static int common_nonsnoop_hash(struct talitos_edesc *edesc,
				struct ahash_request *areq, unsigned int length,
				void (*callback) (struct device *dev,
						  struct talitos_desc *desc,
						  void *context, int error))
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct device *dev = ctx->dev;
	struct talitos_desc *desc = &edesc->desc;
	int ret;
	bool sync_needed = false;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	int sg_count;

	/* first DWORD empty */

	/* hash context in (skipped on the very first h/w-initialised op) */
	if (!req_ctx->first || req_ctx->swinit) {
		map_single_talitos_ptr_nosync(dev, &desc->ptr[1],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_TO_DEVICE);
		req_ctx->swinit = 0;
	}
	/* Indicate next op is not the first. */
	req_ctx->first = 0;

	/* HMAC key */
	if (ctx->keylen)
		to_talitos_ptr(&desc->ptr[2], ctx->dma_key, ctx->keylen,
			       is_sec1);

	/* on SEC1 the buffered bytes go through ptr[3]/desc2, not psrc */
	if (is_sec1 && req_ctx->nbuf)
		length -= req_ctx->nbuf;

	sg_count = edesc->src_nents ?: 1;
	if (is_sec1 && sg_count > 1)
		sg_copy_to_buffer(req_ctx->psrc, sg_count, edesc->buf, length);
	else if (length)
		sg_count = dma_map_sg(dev, req_ctx->psrc, sg_count,
				      DMA_TO_DEVICE);
	/*
	 * data in
	 */
	if (is_sec1 && req_ctx->nbuf) {
		map_single_talitos_ptr(dev, &desc->ptr[3], req_ctx->nbuf,
				       req_ctx->buf[req_ctx->buf_idx],
				       DMA_TO_DEVICE);
	} else {
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
	}

	/* fifth DWORD empty */

	/* hash/HMAC out -or- hash context out */
	if (req_ctx->last)
		map_single_talitos_ptr(dev, &desc->ptr[5],
				       crypto_ahash_digestsize(tfm),
				       areq->result, DMA_FROM_DEVICE);
	else
		map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
					      req_ctx->hw_context_size,
					      req_ctx->hw_context,
					      DMA_FROM_DEVICE);

	/* last DWORD empty */

	if (is_sec1 && from_talitos_ptr_len(&desc->ptr[3], true) == 0)
		talitos_handle_buggy_hash(ctx, edesc, &desc->ptr[3]);

	/* SEC1: chain a second descriptor for the new data after the buffer */
	if (is_sec1 && req_ctx->nbuf && length) {
		struct talitos_desc *desc2 = (struct talitos_desc *)
					     (edesc->buf + edesc->dma_len);
		dma_addr_t next_desc;

		memset(desc2, 0, sizeof(*desc2));
		desc2->hdr = desc->hdr;
		desc2->hdr &= ~DESC_HDR_MODE0_MDEU_INIT;
		desc2->hdr1 = desc2->hdr;
		/* first desc only continues; second one pads/notifies */
		desc->hdr &= ~DESC_HDR_MODE0_MDEU_PAD;
		desc->hdr |= DESC_HDR_MODE0_MDEU_CONT;
		desc->hdr &= ~DESC_HDR_DONE_NOTIFY;

		if (desc->ptr[1].ptr)
			copy_talitos_ptr(&desc2->ptr[1], &desc->ptr[1],
					 is_sec1);
		else
			map_single_talitos_ptr_nosync(dev, &desc2->ptr[1],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_TO_DEVICE);
		copy_talitos_ptr(&desc2->ptr[2], &desc->ptr[2], is_sec1);
		sg_count = talitos_sg_map(dev, req_ctx->psrc, length, edesc,
					  &desc2->ptr[3], sg_count, 0, 0);
		if (sg_count > 1)
			sync_needed = true;
		copy_talitos_ptr(&desc2->ptr[5], &desc->ptr[5], is_sec1);
		/* first desc now writes its context out, desc2 the digest */
		if (req_ctx->last)
			map_single_talitos_ptr_nosync(dev, &desc->ptr[5],
						      req_ctx->hw_context_size,
						      req_ctx->hw_context,
						      DMA_FROM_DEVICE);

		next_desc = dma_map_single(dev, &desc2->hdr1, TALITOS_DESC_SIZE,
					   DMA_BIDIRECTIONAL);
		desc->next_desc = cpu_to_be32(next_desc);
	}

	/* flush CPU-written link tables before the device reads them */
	if (sync_needed)
		dma_sync_single_for_device(dev, edesc->dma_link_tbl,
					   edesc->dma_len, DMA_BIDIRECTIONAL);

	ret = talitos_submit(dev, ctx->ch, desc, callback, areq);
	if (ret != -EINPROGRESS) {
		common_nonsnoop_hash_unmap(dev, edesc, areq);
		kfree(edesc);
	}
	return ret;
}
1909
1910 static struct talitos_edesc *ahash_edesc_alloc(struct ahash_request *areq,
1911 unsigned int nbytes)
1912 {
1913 struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
1914 struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
1915 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1916 struct talitos_private *priv = dev_get_drvdata(ctx->dev);
1917 bool is_sec1 = has_ftr_sec1(priv);
1918
1919 if (is_sec1)
1920 nbytes -= req_ctx->nbuf;
1921
1922 return talitos_edesc_alloc(ctx->dev, req_ctx->psrc, NULL, NULL, 0,
1923 nbytes, 0, 0, 0, areq->base.flags, false);
1924 }
1925
/*
 * Initialise the per-request hash state.  The transient map/unmap of
 * hw_context pushes the (stale) CPU copy toward the device mapping so
 * later _nosync mappings see consistent memory.
 */
static int ahash_init(struct ahash_request *areq)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	unsigned int size;
	dma_addr_t dma;

	/* Initialize the context */
	req_ctx->buf_idx = 0;
	req_ctx->nbuf = 0;
	req_ctx->first = 1; /* first indicates h/w must init its registers */
	req_ctx->swinit = 0; /* assume h/w init of context */
	/* context size depends on digest width (MD5/SHA1/SHA256 vs SHA384/512) */
	size =	(crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
		? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
		: TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
1951
1952
1953
1954
1955
1956 static int ahash_init_sha224_swinit(struct ahash_request *areq)
1957 {
1958 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
1959
1960 req_ctx->hw_context[0] = SHA224_H0;
1961 req_ctx->hw_context[1] = SHA224_H1;
1962 req_ctx->hw_context[2] = SHA224_H2;
1963 req_ctx->hw_context[3] = SHA224_H3;
1964 req_ctx->hw_context[4] = SHA224_H4;
1965 req_ctx->hw_context[5] = SHA224_H5;
1966 req_ctx->hw_context[6] = SHA224_H6;
1967 req_ctx->hw_context[7] = SHA224_H7;
1968
1969
1970 req_ctx->hw_context[8] = 0;
1971 req_ctx->hw_context[9] = 0;
1972
1973 ahash_init(areq);
1974 req_ctx->swinit = 1;
1975
1976 return 0;
1977 }
1978
/*
 * Core hash update/final path.  Accumulates sub-blocksize input in the
 * request's double buffer; when enough data is available, hashes whole
 * blocks (keeping at least one block back on non-final calls so the
 * hardware never pads early) and stashes any remainder for the next
 * call.  Builds an edesc with the appropriate MDEU header bits and
 * submits via common_nonsnoop_hash().
 */
static int ahash_process_req(struct ahash_request *areq, unsigned int nbytes)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_edesc *edesc;
	unsigned int blocksize =
			crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
	unsigned int nbytes_to_hash;
	unsigned int to_hash_later;
	unsigned int nsg;
	int nents;
	struct device *dev = ctx->dev;
	struct talitos_private *priv = dev_get_drvdata(dev);
	bool is_sec1 = has_ftr_sec1(priv);
	u8 *ctx_buf = req_ctx->buf[req_ctx->buf_idx];

	if (!req_ctx->last && (nbytes + req_ctx->nbuf <= blocksize)) {
		/* Buffer up to one whole block */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, nbytes);
		req_ctx->nbuf += nbytes;
		return 0;
	}

	/* At least (blocksize + 1) bytes are available to hash */
	nbytes_to_hash = nbytes + req_ctx->nbuf;
	to_hash_later = nbytes_to_hash & (blocksize - 1);

	if (req_ctx->last)
		to_hash_later = 0;
	else if (to_hash_later)
		/* There is a partial block. Hash the full block(s) now */
		nbytes_to_hash -= to_hash_later;
	else {
		/* Keep one block back so a non-final op never pads */
		nbytes_to_hash -= blocksize;
		to_hash_later = blocksize;
	}

	/* Chain in any previously buffered bytes */
	if (!is_sec1 && req_ctx->nbuf) {
		nsg = (req_ctx->nbuf < nbytes_to_hash) ? 2 : 1;
		sg_init_table(req_ctx->bufsl, nsg);
		sg_set_buf(req_ctx->bufsl, ctx_buf, req_ctx->nbuf);
		if (nsg > 1)
			sg_chain(req_ctx->bufsl, 2, areq->src);
		req_ctx->psrc = req_ctx->bufsl;
	} else if (is_sec1 && req_ctx->nbuf && req_ctx->nbuf < blocksize) {
		/* SEC1 cannot chain SGs: top the buffer up to a block and
		 * fast-forward src past the copied bytes */
		int offset;

		if (nbytes_to_hash > blocksize)
			offset = blocksize - req_ctx->nbuf;
		else
			offset = nbytes_to_hash - req_ctx->nbuf;
		nents = sg_nents_for_len(areq->src, offset);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_copy_to_buffer(areq->src, nents,
				  ctx_buf + req_ctx->nbuf, offset);
		req_ctx->nbuf += offset;
		req_ctx->psrc = scatterwalk_ffwd(req_ctx->bufsl, areq->src,
						 offset);
	} else
		req_ctx->psrc = areq->src;

	if (to_hash_later) {
		/* stash the remainder in the other half of the double buffer */
		nents = sg_nents_for_len(areq->src, nbytes);
		if (nents < 0) {
			dev_err(ctx->dev, "Invalid number of src SG.\n");
			return nents;
		}
		sg_pcopy_to_buffer(areq->src, nents,
				   req_ctx->buf[(req_ctx->buf_idx + 1) & 1],
				   to_hash_later,
				   nbytes - to_hash_later);
	}
	req_ctx->to_hash_later = to_hash_later;

	/* Allocate extended descriptor */
	edesc = ahash_edesc_alloc(areq, nbytes_to_hash);
	if (IS_ERR(edesc))
		return PTR_ERR(edesc);

	edesc->desc.hdr = ctx->desc_hdr_template;

	/* On last one, request SEC to pad; otherwise continue */
	if (req_ctx->last)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_PAD;
	else
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_CONT;

	/* request SEC to INIT hash. */
	if (req_ctx->first && !req_ctx->swinit)
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_INIT;

	/* When the tfm context has a keylen, it's an HMAC.
	 * A first or last (ie. not middle) descriptor must request HMAC.
	 */
	if (ctx->keylen && (req_ctx->first || req_ctx->last))
		edesc->desc.hdr |= DESC_HDR_MODE0_MDEU_HMAC;

	return common_nonsnoop_hash(edesc, areq, nbytes_to_hash, ahash_done);
}
2090
2091 static int ahash_update(struct ahash_request *areq)
2092 {
2093 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2094
2095 req_ctx->last = 0;
2096
2097 return ahash_process_req(areq, areq->nbytes);
2098 }
2099
2100 static int ahash_final(struct ahash_request *areq)
2101 {
2102 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2103
2104 req_ctx->last = 1;
2105
2106 return ahash_process_req(areq, 0);
2107 }
2108
2109 static int ahash_finup(struct ahash_request *areq)
2110 {
2111 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2112
2113 req_ctx->last = 1;
2114
2115 return ahash_process_req(areq, areq->nbytes);
2116 }
2117
2118 static int ahash_digest(struct ahash_request *areq)
2119 {
2120 struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
2121 struct crypto_ahash *ahash = crypto_ahash_reqtfm(areq);
2122
2123 ahash->init(areq);
2124 req_ctx->last = 1;
2125
2126 return ahash_process_req(areq, areq->nbytes);
2127 }
2128
/*
 * Export the partial hash state.  The transient map/unmap of hw_context
 * pulls the device's latest copy toward the CPU before it is copied out.
 */
static int ahash_export(struct ahash_request *areq, void *out)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct talitos_export_state *export = out;
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	dma_addr_t dma;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_FROM_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_FROM_DEVICE);

	memcpy(export->hw_context, req_ctx->hw_context,
	       req_ctx->hw_context_size);
	memcpy(export->buf, req_ctx->buf[req_ctx->buf_idx], req_ctx->nbuf);
	export->swinit = req_ctx->swinit;
	export->first = req_ctx->first;
	export->last = req_ctx->last;
	export->to_hash_later = req_ctx->to_hash_later;
	export->nbuf = req_ctx->nbuf;

	return 0;
}
2153
/*
 * Import a previously exported partial hash state.  The trailing
 * map/unmap of hw_context pushes the restored CPU copy toward the
 * device, mirroring ahash_init()/ahash_export().
 */
static int ahash_import(struct ahash_request *areq, const void *in)
{
	struct talitos_ahash_req_ctx *req_ctx = ahash_request_ctx(areq);
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(areq);
	struct talitos_ctx *ctx = crypto_ahash_ctx(tfm);
	struct device *dev = ctx->dev;
	const struct talitos_export_state *export = in;
	unsigned int size;
	dma_addr_t dma;

	memset(req_ctx, 0, sizeof(*req_ctx));
	/* context size depends on digest width, same rule as ahash_init() */
	size = (crypto_ahash_digestsize(tfm) <= SHA256_DIGEST_SIZE)
	       ? TALITOS_MDEU_CONTEXT_SIZE_MD5_SHA1_SHA256
	       : TALITOS_MDEU_CONTEXT_SIZE_SHA384_SHA512;
	req_ctx->hw_context_size = size;
	memcpy(req_ctx->hw_context, export->hw_context, size);
	memcpy(req_ctx->buf[0], export->buf, export->nbuf);
	req_ctx->swinit = export->swinit;
	req_ctx->first = export->first;
	req_ctx->last = export->last;
	req_ctx->to_hash_later = export->to_hash_later;
	req_ctx->nbuf = export->nbuf;

	dma = dma_map_single(dev, req_ctx->hw_context, req_ctx->hw_context_size,
			     DMA_TO_DEVICE);
	dma_unmap_single(dev, dma, req_ctx->hw_context_size, DMA_TO_DEVICE);

	return 0;
}
2183
2184 static int keyhash(struct crypto_ahash *tfm, const u8 *key, unsigned int keylen,
2185 u8 *hash)
2186 {
2187 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2188
2189 struct scatterlist sg[1];
2190 struct ahash_request *req;
2191 struct crypto_wait wait;
2192 int ret;
2193
2194 crypto_init_wait(&wait);
2195
2196 req = ahash_request_alloc(tfm, GFP_KERNEL);
2197 if (!req)
2198 return -ENOMEM;
2199
2200
2201 ctx->keylen = 0;
2202 ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
2203 crypto_req_done, &wait);
2204
2205 sg_init_one(&sg[0], key, keylen);
2206
2207 ahash_request_set_crypt(req, sg, hash, keylen);
2208 ret = crypto_wait_req(crypto_ahash_digest(req), &wait);
2209
2210 ahash_request_free(req);
2211
2212 return ret;
2213 }
2214
2215 static int ahash_setkey(struct crypto_ahash *tfm, const u8 *key,
2216 unsigned int keylen)
2217 {
2218 struct talitos_ctx *ctx = crypto_tfm_ctx(crypto_ahash_tfm(tfm));
2219 struct device *dev = ctx->dev;
2220 unsigned int blocksize =
2221 crypto_tfm_alg_blocksize(crypto_ahash_tfm(tfm));
2222 unsigned int digestsize = crypto_ahash_digestsize(tfm);
2223 unsigned int keysize = keylen;
2224 u8 hash[SHA512_DIGEST_SIZE];
2225 int ret;
2226
2227 if (keylen <= blocksize)
2228 memcpy(ctx->key, key, keysize);
2229 else {
2230
2231 ret = keyhash(tfm, key, keylen, hash);
2232
2233 if (ret) {
2234 crypto_ahash_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
2235 return -EINVAL;
2236 }
2237
2238 keysize = digestsize;
2239 memcpy(ctx->key, hash, digestsize);
2240 }
2241
2242 if (ctx->keylen)
2243 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
2244
2245 ctx->keylen = keysize;
2246 ctx->dma_key = dma_map_single(dev, ctx->key, keysize, DMA_TO_DEVICE);
2247
2248 return 0;
2249 }
2250
2251
/*
 * Static template describing one algorithm this driver can offer.
 * @type selects which member of @alg is valid (CRYPTO_ALG_TYPE_*);
 * @priority overrides the default cra_priority when non-zero;
 * @desc_hdr_template is the SEC descriptor header preconfigured for the
 * algorithm (descriptor type, execution unit selection and modes).
 */
struct talitos_alg_template {
	u32 type;
	u32 priority;
	union {
		struct crypto_alg crypto;
		struct ahash_alg hash;
		struct aead_alg aead;
	} alg;
	__be32 desc_hdr_template;
};
2262
static struct talitos_alg_template driver_algs[] = {
	/* AEAD algorithms.  These use a single-pass ipsec_esp descriptor
	 * (or the two-pass HMAC_SNOOP_NO_AFEU descriptor for the lower
	 * priority "-hsna" variants). */
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_AESU |
		                     DESC_HDR_MODE0_AESU_CBC |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha1),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha1-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA1_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA1_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha224),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha224-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA224_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA224_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_AESU |
		                     DESC_HDR_MODE0_AESU_CBC |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha256),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha256-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA256_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_SHA256_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_AESU |
		                     DESC_HDR_MODE0_AESU_CBC |
		                     DESC_HDR_SEL1_MDEUB |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha384),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha384-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA384_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUB |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEUB_SHA384_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),cbc(aes))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_AESU |
		                     DESC_HDR_MODE0_AESU_CBC |
		                     DESC_HDR_SEL1_MDEUB |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(sha512),"
					    "cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-sha512-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = SHA512_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUB |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEUB_SHA512_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-talitos",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_AESU |
		                     DESC_HDR_MODE0_AESU_CBC |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(aes))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-aes-talitos-hsna",
				.cra_blocksize = AES_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = AES_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-3des-talitos",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_IPSEC_ESP |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES |
		                     DESC_HDR_SEL1_MDEUA |
		                     DESC_HDR_MODE1_MDEU_INIT |
		                     DESC_HDR_MODE1_MDEU_PAD |
		                     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	{	.type = CRYPTO_ALG_TYPE_AEAD,
		.priority = TALITOS_CRA_PRIORITY_AEAD_HSNA,
		.alg.aead = {
			.base = {
				.cra_name = "authenc(hmac(md5),cbc(des3_ede))",
				.cra_driver_name = "authenc-hmac-md5-"
						   "cbc-3des-talitos-hsna",
				.cra_blocksize = DES3_EDE_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			},
			.ivsize = DES3_EDE_BLOCK_SIZE,
			.maxauthsize = MD5_DIGEST_SIZE,
			.setkey = aead_des3_setkey,
		},
		.desc_hdr_template = DESC_HDR_TYPE_HMAC_SNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC |
				     DESC_HDR_MODE0_DEU_3DES |
				     DESC_HDR_SEL1_MDEUA |
				     DESC_HDR_MODE1_MDEU_INIT |
				     DESC_HDR_MODE1_MDEU_PAD |
				     DESC_HDR_MODE1_MDEU_MD5_HMAC,
	},
	/* ABLKCIPHER algorithms. */
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "ecb(aes)",
			.cra_driver_name = "ecb-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = AES_MIN_KEY_SIZE,
				.max_keysize = AES_MAX_KEY_SIZE,
				.setkey = ablkcipher_aes_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "cbc(aes)",
			.cra_driver_name = "cbc-aes-talitos",
			.cra_blocksize = AES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = AES_MIN_KEY_SIZE,
				.max_keysize = AES_MAX_KEY_SIZE,
				.ivsize = AES_BLOCK_SIZE,
				.setkey = ablkcipher_aes_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CBC,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "ctr(aes)",
			.cra_driver_name = "ctr-aes-talitos",
			.cra_blocksize = 1,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = AES_MIN_KEY_SIZE,
				.max_keysize = AES_MAX_KEY_SIZE,
				.ivsize = AES_BLOCK_SIZE,
				.setkey = ablkcipher_aes_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_AESU_CTR_NONSNOOP |
				     DESC_HDR_SEL0_AESU |
				     DESC_HDR_MODE0_AESU_CTR,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "ecb(des)",
			.cra_driver_name = "ecb-des-talitos",
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = DES_KEY_SIZE,
				.max_keysize = DES_KEY_SIZE,
				.setkey = ablkcipher_des_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "cbc(des)",
			.cra_driver_name = "cbc-des-talitos",
			.cra_blocksize = DES_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = DES_KEY_SIZE,
				.max_keysize = DES_KEY_SIZE,
				.ivsize = DES_BLOCK_SIZE,
				.setkey = ablkcipher_des_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_CBC,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "ecb(des3_ede)",
			.cra_driver_name = "ecb-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
				     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = DES3_EDE_KEY_SIZE,
				.max_keysize = DES3_EDE_KEY_SIZE,
				.setkey = ablkcipher_des3_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_DEU |
				     DESC_HDR_MODE0_DEU_3DES,
	},
	{	.type = CRYPTO_ALG_TYPE_ABLKCIPHER,
		.alg.crypto = {
			.cra_name = "cbc(des3_ede)",
			.cra_driver_name = "cbc-3des-talitos",
			.cra_blocksize = DES3_EDE_BLOCK_SIZE,
			.cra_flags = CRYPTO_ALG_TYPE_ABLKCIPHER |
                                     CRYPTO_ALG_ASYNC,
			.cra_ablkcipher = {
				.min_keysize = DES3_EDE_KEY_SIZE,
				.max_keysize = DES3_EDE_KEY_SIZE,
				.ivsize = DES3_EDE_BLOCK_SIZE,
				.setkey = ablkcipher_des3_setkey,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
			             DESC_HDR_SEL0_DEU |
		                     DESC_HDR_MODE0_DEU_CBC |
		                     DESC_HDR_MODE0_DEU_3DES,
	},
	/* AHASH algorithms. */
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = MD5_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "md5",
				.cra_driver_name = "md5-talitos",
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_MD5,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha1",
				.cra_driver_name = "sha1-talitos",
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA1,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha224",
				.cra_driver_name = "sha224-talitos",
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA224,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha256",
				.cra_driver_name = "sha256-talitos",
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA256,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha384",
				.cra_driver_name = "sha384-talitos",
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA384,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "sha512",
				.cra_driver_name = "sha512-talitos",
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA512,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = MD5_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(md5)",
				.cra_driver_name = "hmac-md5-talitos",
				.cra_blocksize = MD5_HMAC_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_MD5,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA1_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha1)",
				.cra_driver_name = "hmac-sha1-talitos",
				.cra_blocksize = SHA1_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA1,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA224_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha224)",
				.cra_driver_name = "hmac-sha224-talitos",
				.cra_blocksize = SHA224_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA224,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA256_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha256)",
				.cra_driver_name = "hmac-sha256-talitos",
				.cra_blocksize = SHA256_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUA |
				     DESC_HDR_MODE0_MDEU_SHA256,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA384_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha384)",
				.cra_driver_name = "hmac-sha384-talitos",
				.cra_blocksize = SHA384_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA384,
	},
	{	.type = CRYPTO_ALG_TYPE_AHASH,
		.alg.hash = {
			.halg.digestsize = SHA512_DIGEST_SIZE,
			.halg.statesize = sizeof(struct talitos_export_state),
			.halg.base = {
				.cra_name = "hmac(sha512)",
				.cra_driver_name = "hmac-sha512-talitos",
				.cra_blocksize = SHA512_BLOCK_SIZE,
				.cra_flags = CRYPTO_ALG_ASYNC,
			}
		},
		.desc_hdr_template = DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
				     DESC_HDR_SEL0_MDEUB |
				     DESC_HDR_MODE0_MDEUB_SHA512,
	}
};
3006
/*
 * Per-registered-algorithm bookkeeping: the instantiated template plus the
 * device it was registered against, linked into priv->alg_list.
 */
struct talitos_crypto_alg {
	struct list_head entry;
	struct device *dev;
	struct talitos_alg_template algt;
};
3012
/* shared tfm-context initialization for all algorithm types */
static int talitos_init_common(struct talitos_ctx *ctx,
			       struct talitos_crypto_alg *talitos_alg)
{
	struct talitos_private *priv;

	/* assign device for dma mapping */
	ctx->dev = talitos_alg->dev;

	/* assign SEC channel to tfm in round-robin fashion */
	priv = dev_get_drvdata(ctx->dev);
	ctx->ch = atomic_inc_return(&priv->last_chan) &
		  (priv->num_channels - 1);

	/* copy descriptor header template value */
	ctx->desc_hdr_template = talitos_alg->algt.desc_hdr_template;

	/* select done notification */
	ctx->desc_hdr_template |= DESC_HDR_DONE_NOTIFY;

	return 0;
}
3034
/*
 * cra_init for ahash and ablkcipher tfms: recover the enclosing
 * talitos_crypto_alg from the crypto_alg via the correct union member.
 */
static int talitos_cra_init(struct crypto_tfm *tfm)
{
	struct crypto_alg *alg = tfm->__crt_alg;
	struct talitos_crypto_alg *talitos_alg;
	struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);

	/* the container_of offset differs for hash vs. cipher algs, so the
	 * alg type must be checked before converting */
	if ((alg->cra_flags & CRYPTO_ALG_TYPE_MASK) == CRYPTO_ALG_TYPE_AHASH)
		talitos_alg = container_of(__crypto_ahash_alg(alg),
					   struct talitos_crypto_alg,
					   algt.alg.hash);
	else
		talitos_alg = container_of(alg, struct talitos_crypto_alg,
					   algt.alg.crypto);

	return talitos_init_common(ctx, talitos_alg);
}
3051
3052 static int talitos_cra_init_aead(struct crypto_aead *tfm)
3053 {
3054 struct aead_alg *alg = crypto_aead_alg(tfm);
3055 struct talitos_crypto_alg *talitos_alg;
3056 struct talitos_ctx *ctx = crypto_aead_ctx(tfm);
3057
3058 talitos_alg = container_of(alg, struct talitos_crypto_alg,
3059 algt.alg.aead);
3060
3061 return talitos_init_common(ctx, talitos_alg);
3062 }
3063
3064 static int talitos_cra_init_ahash(struct crypto_tfm *tfm)
3065 {
3066 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3067
3068 talitos_cra_init(tfm);
3069
3070 ctx->keylen = 0;
3071 crypto_ahash_set_reqsize(__crypto_ahash_cast(tfm),
3072 sizeof(struct talitos_ahash_req_ctx));
3073
3074 return 0;
3075 }
3076
3077 static void talitos_cra_exit(struct crypto_tfm *tfm)
3078 {
3079 struct talitos_ctx *ctx = crypto_tfm_ctx(tfm);
3080 struct device *dev = ctx->dev;
3081
3082 if (ctx->keylen)
3083 dma_unmap_single(dev, ctx->dma_key, ctx->keylen, DMA_TO_DEVICE);
3084 }
3085
3086
3087
3088
3089
3090
3091 static int hw_supports(struct device *dev, __be32 desc_hdr_template)
3092 {
3093 struct talitos_private *priv = dev_get_drvdata(dev);
3094 int ret;
3095
3096 ret = (1 << DESC_TYPE(desc_hdr_template) & priv->desc_types) &&
3097 (1 << PRIMARY_EU(desc_hdr_template) & priv->exec_units);
3098
3099 if (SECONDARY_EU(desc_hdr_template))
3100 ret = ret && (1 << SECONDARY_EU(desc_hdr_template)
3101 & priv->exec_units);
3102
3103 return ret;
3104 }
3105
3106 static int talitos_remove(struct platform_device *ofdev)
3107 {
3108 struct device *dev = &ofdev->dev;
3109 struct talitos_private *priv = dev_get_drvdata(dev);
3110 struct talitos_crypto_alg *t_alg, *n;
3111 int i;
3112
3113 list_for_each_entry_safe(t_alg, n, &priv->alg_list, entry) {
3114 switch (t_alg->algt.type) {
3115 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3116 break;
3117 case CRYPTO_ALG_TYPE_AEAD:
3118 crypto_unregister_aead(&t_alg->algt.alg.aead);
3119 break;
3120 case CRYPTO_ALG_TYPE_AHASH:
3121 crypto_unregister_ahash(&t_alg->algt.alg.hash);
3122 break;
3123 }
3124 list_del(&t_alg->entry);
3125 }
3126
3127 if (hw_supports(dev, DESC_HDR_SEL0_RNG))
3128 talitos_unregister_rng(dev);
3129
3130 for (i = 0; i < 2; i++)
3131 if (priv->irq[i]) {
3132 free_irq(priv->irq[i], dev);
3133 irq_dispose_mapping(priv->irq[i]);
3134 }
3135
3136 tasklet_kill(&priv->done_task[0]);
3137 if (priv->irq[1])
3138 tasklet_kill(&priv->done_task[1]);
3139
3140 return 0;
3141 }
3142
/*
 * Instantiate one driver_algs[] template: copy it, wire up the driver's
 * operation callbacks for its type, and apply feature-dependent fixups.
 * Returns ERR_PTR(-ENOTSUPP) for algorithms this SEC revision cannot do.
 */
static struct talitos_crypto_alg *talitos_alg_alloc(struct device *dev,
						    struct talitos_alg_template
						           *template)
{
	struct talitos_private *priv = dev_get_drvdata(dev);
	struct talitos_crypto_alg *t_alg;
	struct crypto_alg *alg;

	t_alg = devm_kzalloc(dev, sizeof(struct talitos_crypto_alg),
			     GFP_KERNEL);
	if (!t_alg)
		return ERR_PTR(-ENOMEM);

	t_alg->algt = *template;

	switch (t_alg->algt.type) {
	case CRYPTO_ALG_TYPE_ABLKCIPHER:
		alg = &t_alg->algt.alg.crypto;
		alg->cra_init = talitos_cra_init;
		alg->cra_exit = talitos_cra_exit;
		alg->cra_type = &crypto_ablkcipher_type;
		/* keep a template-provided setkey, else use the default */
		alg->cra_ablkcipher.setkey = alg->cra_ablkcipher.setkey ?:
					     ablkcipher_setkey;
		alg->cra_ablkcipher.encrypt = ablkcipher_encrypt;
		alg->cra_ablkcipher.decrypt = ablkcipher_decrypt;
		break;
	case CRYPTO_ALG_TYPE_AEAD:
		alg = &t_alg->algt.alg.aead.base;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.aead.init = talitos_cra_init_aead;
		t_alg->algt.alg.aead.setkey = t_alg->algt.alg.aead.setkey ?:
					      aead_setkey;
		t_alg->algt.alg.aead.encrypt = aead_encrypt;
		t_alg->algt.alg.aead.decrypt = aead_decrypt;
		/* sha224 AEADs need hardware init support; no sw fallback */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    !strncmp(alg->cra_name, "authenc(hmac(sha224)", 20)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		break;
	case CRYPTO_ALG_TYPE_AHASH:
		alg = &t_alg->algt.alg.hash.halg.base;
		alg->cra_init = talitos_cra_init_ahash;
		alg->cra_exit = talitos_cra_exit;
		t_alg->algt.alg.hash.init = ahash_init;
		t_alg->algt.alg.hash.update = ahash_update;
		t_alg->algt.alg.hash.final = ahash_final;
		t_alg->algt.alg.hash.finup = ahash_finup;
		t_alg->algt.alg.hash.digest = ahash_digest;
		if (!strncmp(alg->cra_name, "hmac", 4))
			t_alg->algt.alg.hash.setkey = ahash_setkey;
		t_alg->algt.alg.hash.import = ahash_import;
		t_alg->algt.alg.hash.export = ahash_export;

		/* hardware HMAC support is a feature flag on some SECs */
		if (!(priv->features & TALITOS_FTR_HMAC_OK) &&
		    !strncmp(alg->cra_name, "hmac", 4)) {
			devm_kfree(dev, t_alg);
			return ERR_PTR(-ENOTSUPP);
		}
		/* sha224 without hw-init: seed sha256 with sha224 IVs in sw */
		if (!(priv->features & TALITOS_FTR_SHA224_HWINIT) &&
		    (!strcmp(alg->cra_name, "sha224") ||
		     !strcmp(alg->cra_name, "hmac(sha224)"))) {
			t_alg->algt.alg.hash.init = ahash_init_sha224_swinit;
			t_alg->algt.desc_hdr_template =
					DESC_HDR_TYPE_COMMON_NONSNOOP_NO_AFEU |
					DESC_HDR_SEL0_MDEUA |
					DESC_HDR_MODE0_MDEU_SHA256;
		}
		break;
	default:
		dev_err(dev, "unknown algorithm type %d\n", t_alg->algt.type);
		devm_kfree(dev, t_alg);
		return ERR_PTR(-EINVAL);
	}

	alg->cra_module = THIS_MODULE;
	if (t_alg->algt.priority)
		alg->cra_priority = t_alg->algt.priority;
	else
		alg->cra_priority = TALITOS_CRA_PRIORITY;
	/* SEC1 requires 32-bit aligned buffers */
	if (has_ftr_sec1(priv))
		alg->cra_alignmask = 3;
	else
		alg->cra_alignmask = 0;
	alg->cra_ctxsize = sizeof(struct talitos_ctx);
	alg->cra_flags |= CRYPTO_ALG_KERN_DRIVER_ONLY;

	t_alg->dev = dev;

	return t_alg;
}
3234
/*
 * talitos_probe_irq() - map and request the engine's interrupt line(s).
 *
 * SEC1 hardware uses a single IRQ serviced by talitos1_interrupt_4ch.
 * SEC2+ hardware may expose a second IRQ line: when both map, channels
 * 0/2 are handled on irq[0] and channels 1/3 on irq[1]; with only one
 * line, a combined 4-channel handler runs on irq[0].
 *
 * On failure the corresponding mapping is disposed and the priv->irq[]
 * slot is zeroed, so later code (tasklet selection in talitos_probe,
 * teardown in talitos_remove) can simply test the slot for non-zero.
 */
static int talitos_probe_irq(struct platform_device *ofdev)
{
	struct device *dev = &ofdev->dev;
	struct device_node *np = ofdev->dev.of_node;
	struct talitos_private *priv = dev_get_drvdata(dev);
	int err;
	bool is_sec1 = has_ftr_sec1(priv);

	priv->irq[0] = irq_of_parse_and_map(np, 0);
	if (!priv->irq[0]) {
		dev_err(dev, "failed to map irq\n");
		return -EINVAL;
	}
	if (is_sec1) {
		/* SEC1: one IRQ covers all channels */
		err = request_irq(priv->irq[0], talitos1_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	priv->irq[1] = irq_of_parse_and_map(np, 1);

	/* get the primary irq line */
	if (!priv->irq[1]) {
		err = request_irq(priv->irq[0], talitos2_interrupt_4ch, 0,
				  dev_driver_string(dev), dev);
		goto primary_out;
	}

	err = request_irq(priv->irq[0], talitos2_interrupt_ch0_2, 0,
			  dev_driver_string(dev), dev);
	if (err)
		goto primary_out;

	/* get the secondary irq line */
	err = request_irq(priv->irq[1], talitos2_interrupt_ch1_3, 0,
			  dev_driver_string(dev), dev);
	if (err) {
		dev_err(dev, "failed to request secondary irq\n");
		irq_dispose_mapping(priv->irq[1]);
		priv->irq[1] = 0;
		/* irq[0] stays requested; err propagates to the caller,
		 * which unwinds everything via talitos_remove(). */
	}

	return err;

primary_out:
	if (err) {
		dev_err(dev, "failed to request primary irq\n");
		irq_dispose_mapping(priv->irq[0]);
		priv->irq[0] = 0;
	}

	return err;
}
3288
3289 static int talitos_probe(struct platform_device *ofdev)
3290 {
3291 struct device *dev = &ofdev->dev;
3292 struct device_node *np = ofdev->dev.of_node;
3293 struct talitos_private *priv;
3294 int i, err;
3295 int stride;
3296 struct resource *res;
3297
3298 priv = devm_kzalloc(dev, sizeof(struct talitos_private), GFP_KERNEL);
3299 if (!priv)
3300 return -ENOMEM;
3301
3302 INIT_LIST_HEAD(&priv->alg_list);
3303
3304 dev_set_drvdata(dev, priv);
3305
3306 priv->ofdev = ofdev;
3307
3308 spin_lock_init(&priv->reg_lock);
3309
3310 res = platform_get_resource(ofdev, IORESOURCE_MEM, 0);
3311 if (!res)
3312 return -ENXIO;
3313 priv->reg = devm_ioremap(dev, res->start, resource_size(res));
3314 if (!priv->reg) {
3315 dev_err(dev, "failed to of_iomap\n");
3316 err = -ENOMEM;
3317 goto err_out;
3318 }
3319
3320
3321 of_property_read_u32(np, "fsl,num-channels", &priv->num_channels);
3322 of_property_read_u32(np, "fsl,channel-fifo-len", &priv->chfifo_len);
3323 of_property_read_u32(np, "fsl,exec-units-mask", &priv->exec_units);
3324 of_property_read_u32(np, "fsl,descriptor-types-mask",
3325 &priv->desc_types);
3326
3327 if (!is_power_of_2(priv->num_channels) || !priv->chfifo_len ||
3328 !priv->exec_units || !priv->desc_types) {
3329 dev_err(dev, "invalid property data in device tree node\n");
3330 err = -EINVAL;
3331 goto err_out;
3332 }
3333
3334 if (of_device_is_compatible(np, "fsl,sec3.0"))
3335 priv->features |= TALITOS_FTR_SRC_LINK_TBL_LEN_INCLUDES_EXTENT;
3336
3337 if (of_device_is_compatible(np, "fsl,sec2.1"))
3338 priv->features |= TALITOS_FTR_HW_AUTH_CHECK |
3339 TALITOS_FTR_SHA224_HWINIT |
3340 TALITOS_FTR_HMAC_OK;
3341
3342 if (of_device_is_compatible(np, "fsl,sec1.0"))
3343 priv->features |= TALITOS_FTR_SEC1;
3344
3345 if (of_device_is_compatible(np, "fsl,sec1.2")) {
3346 priv->reg_deu = priv->reg + TALITOS12_DEU;
3347 priv->reg_aesu = priv->reg + TALITOS12_AESU;
3348 priv->reg_mdeu = priv->reg + TALITOS12_MDEU;
3349 stride = TALITOS1_CH_STRIDE;
3350 } else if (of_device_is_compatible(np, "fsl,sec1.0")) {
3351 priv->reg_deu = priv->reg + TALITOS10_DEU;
3352 priv->reg_aesu = priv->reg + TALITOS10_AESU;
3353 priv->reg_mdeu = priv->reg + TALITOS10_MDEU;
3354 priv->reg_afeu = priv->reg + TALITOS10_AFEU;
3355 priv->reg_rngu = priv->reg + TALITOS10_RNGU;
3356 priv->reg_pkeu = priv->reg + TALITOS10_PKEU;
3357 stride = TALITOS1_CH_STRIDE;
3358 } else {
3359 priv->reg_deu = priv->reg + TALITOS2_DEU;
3360 priv->reg_aesu = priv->reg + TALITOS2_AESU;
3361 priv->reg_mdeu = priv->reg + TALITOS2_MDEU;
3362 priv->reg_afeu = priv->reg + TALITOS2_AFEU;
3363 priv->reg_rngu = priv->reg + TALITOS2_RNGU;
3364 priv->reg_pkeu = priv->reg + TALITOS2_PKEU;
3365 priv->reg_keu = priv->reg + TALITOS2_KEU;
3366 priv->reg_crcu = priv->reg + TALITOS2_CRCU;
3367 stride = TALITOS2_CH_STRIDE;
3368 }
3369
3370 err = talitos_probe_irq(ofdev);
3371 if (err)
3372 goto err_out;
3373
3374 if (has_ftr_sec1(priv)) {
3375 if (priv->num_channels == 1)
3376 tasklet_init(&priv->done_task[0], talitos1_done_ch0,
3377 (unsigned long)dev);
3378 else
3379 tasklet_init(&priv->done_task[0], talitos1_done_4ch,
3380 (unsigned long)dev);
3381 } else {
3382 if (priv->irq[1]) {
3383 tasklet_init(&priv->done_task[0], talitos2_done_ch0_2,
3384 (unsigned long)dev);
3385 tasklet_init(&priv->done_task[1], talitos2_done_ch1_3,
3386 (unsigned long)dev);
3387 } else if (priv->num_channels == 1) {
3388 tasklet_init(&priv->done_task[0], talitos2_done_ch0,
3389 (unsigned long)dev);
3390 } else {
3391 tasklet_init(&priv->done_task[0], talitos2_done_4ch,
3392 (unsigned long)dev);
3393 }
3394 }
3395
3396 priv->chan = devm_kcalloc(dev,
3397 priv->num_channels,
3398 sizeof(struct talitos_channel),
3399 GFP_KERNEL);
3400 if (!priv->chan) {
3401 dev_err(dev, "failed to allocate channel management space\n");
3402 err = -ENOMEM;
3403 goto err_out;
3404 }
3405
3406 priv->fifo_len = roundup_pow_of_two(priv->chfifo_len);
3407
3408 for (i = 0; i < priv->num_channels; i++) {
3409 priv->chan[i].reg = priv->reg + stride * (i + 1);
3410 if (!priv->irq[1] || !(i & 1))
3411 priv->chan[i].reg += TALITOS_CH_BASE_OFFSET;
3412
3413 spin_lock_init(&priv->chan[i].head_lock);
3414 spin_lock_init(&priv->chan[i].tail_lock);
3415
3416 priv->chan[i].fifo = devm_kcalloc(dev,
3417 priv->fifo_len,
3418 sizeof(struct talitos_request),
3419 GFP_KERNEL);
3420 if (!priv->chan[i].fifo) {
3421 dev_err(dev, "failed to allocate request fifo %d\n", i);
3422 err = -ENOMEM;
3423 goto err_out;
3424 }
3425
3426 atomic_set(&priv->chan[i].submit_count,
3427 -(priv->chfifo_len - 1));
3428 }
3429
3430 dma_set_mask(dev, DMA_BIT_MASK(36));
3431
3432
3433 err = init_device(dev);
3434 if (err) {
3435 dev_err(dev, "failed to initialize device\n");
3436 goto err_out;
3437 }
3438
3439
3440 if (hw_supports(dev, DESC_HDR_SEL0_RNG)) {
3441 err = talitos_register_rng(dev);
3442 if (err) {
3443 dev_err(dev, "failed to register hwrng: %d\n", err);
3444 goto err_out;
3445 } else
3446 dev_info(dev, "hwrng\n");
3447 }
3448
3449
3450 for (i = 0; i < ARRAY_SIZE(driver_algs); i++) {
3451 if (hw_supports(dev, driver_algs[i].desc_hdr_template)) {
3452 struct talitos_crypto_alg *t_alg;
3453 struct crypto_alg *alg = NULL;
3454
3455 t_alg = talitos_alg_alloc(dev, &driver_algs[i]);
3456 if (IS_ERR(t_alg)) {
3457 err = PTR_ERR(t_alg);
3458 if (err == -ENOTSUPP)
3459 continue;
3460 goto err_out;
3461 }
3462
3463 switch (t_alg->algt.type) {
3464 case CRYPTO_ALG_TYPE_ABLKCIPHER:
3465 err = crypto_register_alg(
3466 &t_alg->algt.alg.crypto);
3467 alg = &t_alg->algt.alg.crypto;
3468 break;
3469
3470 case CRYPTO_ALG_TYPE_AEAD:
3471 err = crypto_register_aead(
3472 &t_alg->algt.alg.aead);
3473 alg = &t_alg->algt.alg.aead.base;
3474 break;
3475
3476 case CRYPTO_ALG_TYPE_AHASH:
3477 err = crypto_register_ahash(
3478 &t_alg->algt.alg.hash);
3479 alg = &t_alg->algt.alg.hash.halg.base;
3480 break;
3481 }
3482 if (err) {
3483 dev_err(dev, "%s alg registration failed\n",
3484 alg->cra_driver_name);
3485 devm_kfree(dev, t_alg);
3486 } else
3487 list_add_tail(&t_alg->entry, &priv->alg_list);
3488 }
3489 }
3490 if (!list_empty(&priv->alg_list))
3491 dev_info(dev, "%s algorithms registered in /proc/crypto\n",
3492 (char *)of_get_property(np, "compatible", NULL));
3493
3494 return 0;
3495
3496 err_out:
3497 talitos_remove(ofdev);
3498
3499 return err;
3500 }
3501
/*
 * OF match table.  SEC1 and SEC2+ support are individually configurable;
 * more specific compatibles (e.g. "fsl,sec2.1", "fsl,sec3.0", "fsl,sec1.2")
 * are distinguished inside talitos_probe() via of_device_is_compatible().
 */
static const struct of_device_id talitos_match[] = {
#ifdef CONFIG_CRYPTO_DEV_TALITOS1
	{
		.compatible = "fsl,sec1.0",
	},
#endif
#ifdef CONFIG_CRYPTO_DEV_TALITOS2
	{
		.compatible = "fsl,sec2.0",
	},
#endif
	{},
};
MODULE_DEVICE_TABLE(of, talitos_match);
3516
/* platform driver glue: probe/remove are defined above in this file */
static struct platform_driver talitos_driver = {
	.driver = {
		.name = "talitos",
		.of_match_table = talitos_match,
	},
	.probe = talitos_probe,
	.remove = talitos_remove,
};

module_platform_driver(talitos_driver);

MODULE_LICENSE("GPL");
MODULE_AUTHOR("Kim Phillips <kim.phillips@freescale.com>");
MODULE_DESCRIPTION("Freescale integrated security engine (SEC) driver");