This source file includes following definitions.
- sun4i_ss_opti_poll
- sun4i_ss_cipher_poll_fallback
- sun4i_ss_cipher_poll
- sun4i_ss_cbc_aes_encrypt
- sun4i_ss_cbc_aes_decrypt
- sun4i_ss_ecb_aes_encrypt
- sun4i_ss_ecb_aes_decrypt
- sun4i_ss_cbc_des_encrypt
- sun4i_ss_cbc_des_decrypt
- sun4i_ss_ecb_des_encrypt
- sun4i_ss_ecb_des_decrypt
- sun4i_ss_cbc_des3_encrypt
- sun4i_ss_cbc_des3_decrypt
- sun4i_ss_ecb_des3_encrypt
- sun4i_ss_ecb_des3_decrypt
- sun4i_ss_cipher_init
- sun4i_ss_cipher_exit
- sun4i_ss_aes_setkey
- sun4i_ss_des_setkey
- sun4i_ss_des3_setkey
1
2
3
4
5
6
7
8
9
10
11
12
13 #include "sun4i-ss.h"
14
/*
 * sun4i_ss_opti_poll() - optimized CPU-driven cipher path.
 *
 * Feeds the SS RX FIFO straight from the source scatterlist pages and
 * drains the TX FIFO straight into the destination pages with
 * writesl()/readsl(), with no bounce buffering.  Callers only take this
 * path when every SG entry length is a multiple of 4 bytes (see
 * sun4i_ss_cipher_poll), so whole 32-bit words can always be moved.
 *
 * The whole request runs with ss->slock held and IRQs disabled, since
 * key/IV/mode registers and the FIFOs are shared device state.
 *
 * Returns 0 on success or -EINVAL on bad/unmappable scatterlists.
 */
static int noinline_for_stack sun4i_ss_opti_poll(struct skcipher_request *areq)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
	struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
	struct sun4i_ss_ctx *ss = op->ss;
	unsigned int ivsize = crypto_skcipher_ivsize(tfm);
	struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
	u32 mode = ctx->mode;
	/* initial RX space assumption before the first SS_FCSR read */
	u32 rx_cnt = SS_RX_DEFAULT;
	u32 tx_cnt = 0;
	u32 spaces;
	u32 v;
	int err = 0;
	unsigned int i;
	unsigned int ileft = areq->cryptlen;
	unsigned int oleft = areq->cryptlen;
	unsigned int todo;
	struct sg_mapping_iter mi, mo;
	unsigned int oi, oo; /* byte offsets into current in/out SG entry */
	unsigned long flags;

	if (!areq->cryptlen)
		return 0;

	if (!areq->src || !areq->dst) {
		dev_err_ratelimited(ss->dev, "ERROR: Some SGs are NULL\n");
		return -EINVAL;
	}

	spin_lock_irqsave(&ss->slock, flags);

	/* load the key, one 32-bit word per SS_KEY register */
	for (i = 0; i < op->keylen; i += 4)
		writel(*(op->key + i / 4), ss->base + SS_KEY0 + i);

	if (areq->iv) {
		for (i = 0; i < 4 && i < ivsize / 4; i++) {
			v = *(u32 *)(areq->iv + i * 4);
			writel(v, ss->base + SS_IV0 + i * 4);
		}
	}
	/* writing the mode (with SS_ENABLED) starts the engine */
	writel(mode, ss->base + SS_CTL);

	sg_miter_start(&mi, areq->src, sg_nents(areq->src),
		       SG_MITER_FROM_SG | SG_MITER_ATOMIC);
	sg_miter_start(&mo, areq->dst, sg_nents(areq->dst),
		       SG_MITER_TO_SG | SG_MITER_ATOMIC);
	sg_miter_next(&mi);
	sg_miter_next(&mo);
	if (!mi.addr || !mo.addr) {
		dev_err_ratelimited(ss->dev, "ERROR: sg_miter return null\n");
		err = -EINVAL;
		goto release_ss;
	}

	/* counters are in 32-bit words from here on */
	ileft = areq->cryptlen / 4;
	oleft = areq->cryptlen / 4;
	oi = 0;
	oo = 0;
	do {
		/* push as many words as both the FIFO and the SG entry allow */
		todo = min(rx_cnt, ileft);
		todo = min_t(size_t, todo, (mi.length - oi) / 4);
		if (todo) {
			ileft -= todo;
			writesl(ss->base + SS_RXFIFO, mi.addr + oi, todo);
			oi += todo * 4;
		}
		if (oi == mi.length) {
			sg_miter_next(&mi);
			oi = 0;
		}

		/* refresh actual FIFO space counts from the hardware */
		spaces = readl(ss->base + SS_FCSR);
		rx_cnt = SS_RXFIFO_SPACES(spaces);
		tx_cnt = SS_TXFIFO_SPACES(spaces);

		/* pull whatever ciphertext/plaintext words are ready */
		todo = min(tx_cnt, oleft);
		todo = min_t(size_t, todo, (mo.length - oo) / 4);
		if (todo) {
			oleft -= todo;
			readsl(ss->base + SS_TXFIFO, mo.addr + oo, todo);
			oo += todo * 4;
		}
		if (oo == mo.length) {
			sg_miter_next(&mo);
			oo = 0;
		}
	} while (oleft);

	/* read back the updated IV so chained CBC requests can continue */
	if (areq->iv) {
		for (i = 0; i < 4 && i < ivsize / 4; i++) {
			v = readl(ss->base + SS_IV0 + i * 4);
			*(u32 *)(areq->iv + i * 4) = v;
		}
	}

release_ss:
	sg_miter_stop(&mi);
	sg_miter_stop(&mo);
	writel(0, ss->base + SS_CTL);
	spin_unlock_irqrestore(&ss->slock, flags);
	return err;
}
118
119
120 static int noinline_for_stack sun4i_ss_cipher_poll_fallback(struct skcipher_request *areq)
121 {
122 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
123 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
124 struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
125 SYNC_SKCIPHER_REQUEST_ON_STACK(subreq, op->fallback_tfm);
126 int err;
127
128 skcipher_request_set_sync_tfm(subreq, op->fallback_tfm);
129 skcipher_request_set_callback(subreq, areq->base.flags, NULL,
130 NULL);
131 skcipher_request_set_crypt(subreq, areq->src, areq->dst,
132 areq->cryptlen, areq->iv);
133 if (ctx->mode & SS_DECRYPTION)
134 err = crypto_skcipher_decrypt(subreq);
135 else
136 err = crypto_skcipher_encrypt(subreq);
137 skcipher_request_zero(subreq);
138
139 return err;
140 }
141
142
143 static int sun4i_ss_cipher_poll(struct skcipher_request *areq)
144 {
145 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
146 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
147 struct sun4i_ss_ctx *ss = op->ss;
148 int no_chunk = 1;
149 struct scatterlist *in_sg = areq->src;
150 struct scatterlist *out_sg = areq->dst;
151 unsigned int ivsize = crypto_skcipher_ivsize(tfm);
152 struct sun4i_cipher_req_ctx *ctx = skcipher_request_ctx(areq);
153 struct skcipher_alg *alg = crypto_skcipher_alg(tfm);
154 struct sun4i_ss_alg_template *algt;
155 u32 mode = ctx->mode;
156
157 u32 rx_cnt = SS_RX_DEFAULT;
158 u32 tx_cnt = 0;
159 u32 v;
160 u32 spaces;
161 int err = 0;
162 unsigned int i;
163 unsigned int ileft = areq->cryptlen;
164 unsigned int oleft = areq->cryptlen;
165 unsigned int todo;
166 struct sg_mapping_iter mi, mo;
167 unsigned int oi, oo;
168 unsigned int ob = 0;
169 unsigned int obo = 0;
170 unsigned int obl = 0;
171 unsigned long flags;
172 bool need_fallback;
173
174 if (!areq->cryptlen)
175 return 0;
176
177 if (!areq->src || !areq->dst) {
178 dev_err_ratelimited(ss->dev, "ERROR: Some SGs are NULL\n");
179 return -EINVAL;
180 }
181
182 algt = container_of(alg, struct sun4i_ss_alg_template, alg.crypto);
183 if (areq->cryptlen % algt->alg.crypto.base.cra_blocksize)
184 need_fallback = true;
185
186
187
188
189
190 while (in_sg && no_chunk == 1) {
191 if (in_sg->length % 4)
192 no_chunk = 0;
193 in_sg = sg_next(in_sg);
194 }
195 while (out_sg && no_chunk == 1) {
196 if (out_sg->length % 4)
197 no_chunk = 0;
198 out_sg = sg_next(out_sg);
199 }
200
201 if (no_chunk == 1 && !need_fallback)
202 return sun4i_ss_opti_poll(areq);
203
204 if (need_fallback)
205 return sun4i_ss_cipher_poll_fallback(areq);
206
207 spin_lock_irqsave(&ss->slock, flags);
208
209 for (i = 0; i < op->keylen; i += 4)
210 writel(*(op->key + i / 4), ss->base + SS_KEY0 + i);
211
212 if (areq->iv) {
213 for (i = 0; i < 4 && i < ivsize / 4; i++) {
214 v = *(u32 *)(areq->iv + i * 4);
215 writel(v, ss->base + SS_IV0 + i * 4);
216 }
217 }
218 writel(mode, ss->base + SS_CTL);
219
220 sg_miter_start(&mi, areq->src, sg_nents(areq->src),
221 SG_MITER_FROM_SG | SG_MITER_ATOMIC);
222 sg_miter_start(&mo, areq->dst, sg_nents(areq->dst),
223 SG_MITER_TO_SG | SG_MITER_ATOMIC);
224 sg_miter_next(&mi);
225 sg_miter_next(&mo);
226 if (!mi.addr || !mo.addr) {
227 dev_err_ratelimited(ss->dev, "ERROR: sg_miter return null\n");
228 err = -EINVAL;
229 goto release_ss;
230 }
231 ileft = areq->cryptlen;
232 oleft = areq->cryptlen;
233 oi = 0;
234 oo = 0;
235
236 while (oleft) {
237 if (ileft) {
238 char buf[4 * SS_RX_MAX];
239
240
241
242
243
244 todo = min(rx_cnt, ileft / 4);
245 todo = min_t(size_t, todo, (mi.length - oi) / 4);
246 if (todo && !ob) {
247 writesl(ss->base + SS_RXFIFO, mi.addr + oi,
248 todo);
249 ileft -= todo * 4;
250 oi += todo * 4;
251 } else {
252
253
254
255
256
257
258
259 todo = min(rx_cnt * 4 - ob, ileft);
260 todo = min_t(size_t, todo, mi.length - oi);
261 memcpy(buf + ob, mi.addr + oi, todo);
262 ileft -= todo;
263 oi += todo;
264 ob += todo;
265 if (!(ob % 4)) {
266 writesl(ss->base + SS_RXFIFO, buf,
267 ob / 4);
268 ob = 0;
269 }
270 }
271 if (oi == mi.length) {
272 sg_miter_next(&mi);
273 oi = 0;
274 }
275 }
276
277 spaces = readl(ss->base + SS_FCSR);
278 rx_cnt = SS_RXFIFO_SPACES(spaces);
279 tx_cnt = SS_TXFIFO_SPACES(spaces);
280 dev_dbg(ss->dev,
281 "%x %u/%zu %u/%u cnt=%u %u/%zu %u/%u cnt=%u %u\n",
282 mode,
283 oi, mi.length, ileft, areq->cryptlen, rx_cnt,
284 oo, mo.length, oleft, areq->cryptlen, tx_cnt, ob);
285
286 if (!tx_cnt)
287 continue;
288
289 todo = min(tx_cnt, oleft / 4);
290 todo = min_t(size_t, todo, (mo.length - oo) / 4);
291 if (todo) {
292 readsl(ss->base + SS_TXFIFO, mo.addr + oo, todo);
293 oleft -= todo * 4;
294 oo += todo * 4;
295 if (oo == mo.length) {
296 sg_miter_next(&mo);
297 oo = 0;
298 }
299 } else {
300 char bufo[4 * SS_TX_MAX];
301
302
303
304
305
306 readsl(ss->base + SS_TXFIFO, bufo, tx_cnt);
307 obl = tx_cnt * 4;
308 obo = 0;
309 do {
310
311
312
313
314
315
316 todo = min_t(size_t,
317 mo.length - oo, obl - obo);
318 memcpy(mo.addr + oo, bufo + obo, todo);
319 oleft -= todo;
320 obo += todo;
321 oo += todo;
322 if (oo == mo.length) {
323 sg_miter_next(&mo);
324 oo = 0;
325 }
326 } while (obo < obl);
327
328 }
329 }
330 if (areq->iv) {
331 for (i = 0; i < 4 && i < ivsize / 4; i++) {
332 v = readl(ss->base + SS_IV0 + i * 4);
333 *(u32 *)(areq->iv + i * 4) = v;
334 }
335 }
336
337 release_ss:
338 sg_miter_stop(&mi);
339 sg_miter_stop(&mo);
340 writel(0, ss->base + SS_CTL);
341 spin_unlock_irqrestore(&ss->slock, flags);
342
343 return err;
344 }
345
346
347 int sun4i_ss_cbc_aes_encrypt(struct skcipher_request *areq)
348 {
349 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
350 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
351 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
352
353 rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
354 op->keymode;
355 return sun4i_ss_cipher_poll(areq);
356 }
357
358 int sun4i_ss_cbc_aes_decrypt(struct skcipher_request *areq)
359 {
360 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
361 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
362 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
363
364 rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
365 op->keymode;
366 return sun4i_ss_cipher_poll(areq);
367 }
368
369
370 int sun4i_ss_ecb_aes_encrypt(struct skcipher_request *areq)
371 {
372 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
373 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
374 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
375
376 rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
377 op->keymode;
378 return sun4i_ss_cipher_poll(areq);
379 }
380
381 int sun4i_ss_ecb_aes_decrypt(struct skcipher_request *areq)
382 {
383 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
384 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
385 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
386
387 rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
388 op->keymode;
389 return sun4i_ss_cipher_poll(areq);
390 }
391
392
393 int sun4i_ss_cbc_des_encrypt(struct skcipher_request *areq)
394 {
395 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
396 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
397 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
398
399 rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
400 op->keymode;
401 return sun4i_ss_cipher_poll(areq);
402 }
403
404 int sun4i_ss_cbc_des_decrypt(struct skcipher_request *areq)
405 {
406 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
407 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
408 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
409
410 rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
411 op->keymode;
412 return sun4i_ss_cipher_poll(areq);
413 }
414
415
416 int sun4i_ss_ecb_des_encrypt(struct skcipher_request *areq)
417 {
418 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
419 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
420 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
421
422 rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
423 op->keymode;
424 return sun4i_ss_cipher_poll(areq);
425 }
426
427 int sun4i_ss_ecb_des_decrypt(struct skcipher_request *areq)
428 {
429 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
430 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
431 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
432
433 rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
434 op->keymode;
435 return sun4i_ss_cipher_poll(areq);
436 }
437
438
439 int sun4i_ss_cbc_des3_encrypt(struct skcipher_request *areq)
440 {
441 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
442 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
443 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
444
445 rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
446 op->keymode;
447 return sun4i_ss_cipher_poll(areq);
448 }
449
450 int sun4i_ss_cbc_des3_decrypt(struct skcipher_request *areq)
451 {
452 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
453 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
454 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
455
456 rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
457 op->keymode;
458 return sun4i_ss_cipher_poll(areq);
459 }
460
461
462 int sun4i_ss_ecb_des3_encrypt(struct skcipher_request *areq)
463 {
464 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
465 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
466 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
467
468 rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
469 op->keymode;
470 return sun4i_ss_cipher_poll(areq);
471 }
472
473 int sun4i_ss_ecb_des3_decrypt(struct skcipher_request *areq)
474 {
475 struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(areq);
476 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
477 struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
478
479 rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
480 op->keymode;
481 return sun4i_ss_cipher_poll(areq);
482 }
483
484 int sun4i_ss_cipher_init(struct crypto_tfm *tfm)
485 {
486 struct sun4i_tfm_ctx *op = crypto_tfm_ctx(tfm);
487 struct sun4i_ss_alg_template *algt;
488 const char *name = crypto_tfm_alg_name(tfm);
489
490 memset(op, 0, sizeof(struct sun4i_tfm_ctx));
491
492 algt = container_of(tfm->__crt_alg, struct sun4i_ss_alg_template,
493 alg.crypto.base);
494 op->ss = algt->ss;
495
496 crypto_skcipher_set_reqsize(__crypto_skcipher_cast(tfm),
497 sizeof(struct sun4i_cipher_req_ctx));
498
499 op->fallback_tfm = crypto_alloc_sync_skcipher(name, 0, CRYPTO_ALG_NEED_FALLBACK);
500 if (IS_ERR(op->fallback_tfm)) {
501 dev_err(op->ss->dev, "ERROR: Cannot allocate fallback for %s %ld\n",
502 name, PTR_ERR(op->fallback_tfm));
503 return PTR_ERR(op->fallback_tfm);
504 }
505
506 return 0;
507 }
508
509 void sun4i_ss_cipher_exit(struct crypto_tfm *tfm)
510 {
511 struct sun4i_tfm_ctx *op = crypto_tfm_ctx(tfm);
512 crypto_free_sync_skcipher(op->fallback_tfm);
513 }
514
515
516 int sun4i_ss_aes_setkey(struct crypto_skcipher *tfm, const u8 *key,
517 unsigned int keylen)
518 {
519 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
520 struct sun4i_ss_ctx *ss = op->ss;
521
522 switch (keylen) {
523 case 128 / 8:
524 op->keymode = SS_AES_128BITS;
525 break;
526 case 192 / 8:
527 op->keymode = SS_AES_192BITS;
528 break;
529 case 256 / 8:
530 op->keymode = SS_AES_256BITS;
531 break;
532 default:
533 dev_err(ss->dev, "ERROR: Invalid keylen %u\n", keylen);
534 crypto_skcipher_set_flags(tfm, CRYPTO_TFM_RES_BAD_KEY_LEN);
535 return -EINVAL;
536 }
537 op->keylen = keylen;
538 memcpy(op->key, key, keylen);
539
540 crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
541 crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
542
543 return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
544 }
545
546
547 int sun4i_ss_des_setkey(struct crypto_skcipher *tfm, const u8 *key,
548 unsigned int keylen)
549 {
550 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
551 int err;
552
553 err = verify_skcipher_des_key(tfm, key);
554 if (err)
555 return err;
556
557 op->keylen = keylen;
558 memcpy(op->key, key, keylen);
559
560 crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
561 crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
562
563 return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
564 }
565
566
567 int sun4i_ss_des3_setkey(struct crypto_skcipher *tfm, const u8 *key,
568 unsigned int keylen)
569 {
570 struct sun4i_tfm_ctx *op = crypto_skcipher_ctx(tfm);
571 int err;
572
573 err = verify_skcipher_des3_key(tfm, key);
574 if (err)
575 return err;
576
577 op->keylen = keylen;
578 memcpy(op->key, key, keylen);
579
580 crypto_sync_skcipher_clear_flags(op->fallback_tfm, CRYPTO_TFM_REQ_MASK);
581 crypto_sync_skcipher_set_flags(op->fallback_tfm, tfm->base.crt_flags & CRYPTO_TFM_REQ_MASK);
582
583 return crypto_sync_skcipher_setkey(op->fallback_tfm, key, keylen);
584
585 }