Lines matching refs:dd (every line in this file that references the struct atmel_aes_dev *dd device-instance pointer; the leading number on each line is the source line, followed by the enclosing function)
83 struct atmel_aes_dev *dd; member
215 static inline u32 atmel_aes_read(struct atmel_aes_dev *dd, u32 offset) in atmel_aes_read() argument
217 return readl_relaxed(dd->io_base + offset); in atmel_aes_read()
220 static inline void atmel_aes_write(struct atmel_aes_dev *dd, in atmel_aes_write() argument
223 writel_relaxed(value, dd->io_base + offset); in atmel_aes_write()
226 static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_read_n() argument
230 *value = atmel_aes_read(dd, offset); in atmel_aes_read_n()
233 static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset, in atmel_aes_write_n() argument
237 atmel_aes_write(dd, offset, *value); in atmel_aes_write_n()
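
The _n helpers walk consecutive 32-bit registers; their loop headers never mention dd, so only the bodies appear above. A minimal sketch consistent with those bodies (each register is a word, laid out contiguously, hence the offset += 4 step):

static void atmel_aes_read_n(struct atmel_aes_dev *dd, u32 offset,
			     u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		*value = atmel_aes_read(dd, offset);
}

static void atmel_aes_write_n(struct atmel_aes_dev *dd, u32 offset,
			      u32 *value, int count)
{
	for (; count--; value++, offset += 4)
		atmel_aes_write(dd, offset, *value);
}

Callers pass word counts, which is why byte counts elsewhere in this listing are converted with ">> 2" before reaching these helpers.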
246 if (!ctx->dd) { in atmel_aes_find_dev()
251 ctx->dd = aes_dd; in atmel_aes_find_dev()
253 aes_dd = ctx->dd; in atmel_aes_find_dev()
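atmel_aes_find_dev() binds a transform context to one device on first use and then sticks with it. Only the dd references survive in the listing; a sketch of the usual pattern, where the driver-global list and lock names (atmel_aes.dev_list, atmel_aes.lock) are assumptions:

static struct atmel_aes_dev *atmel_aes_find_dev(struct atmel_aes_ctx *ctx)
{
	struct atmel_aes_dev *aes_dd = NULL;
	struct atmel_aes_dev *tmp;

	spin_lock_bh(&atmel_aes.lock);
	if (!ctx->dd) {
		/* first request from this tfm: take any registered device */
		list_for_each_entry(tmp, &atmel_aes.dev_list, list) {
			aes_dd = tmp;
			break;
		}
		ctx->dd = aes_dd;
	} else {
		/* later requests reuse the device chosen first */
		aes_dd = ctx->dd;
	}
	spin_unlock_bh(&atmel_aes.lock);

	return aes_dd;
}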
261 static int atmel_aes_hw_init(struct atmel_aes_dev *dd) in atmel_aes_hw_init() argument
265 err = clk_prepare_enable(dd->iclk); in atmel_aes_hw_init()
269 if (!(dd->flags & AES_FLAGS_INIT)) { in atmel_aes_hw_init()
270 atmel_aes_write(dd, AES_CR, AES_CR_SWRST); in atmel_aes_hw_init()
271 atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET); in atmel_aes_hw_init()
272 dd->flags |= AES_FLAGS_INIT; in atmel_aes_hw_init()
273 dd->err = 0; in atmel_aes_hw_init()
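hw_init gates the controller reset behind AES_FLAGS_INIT so it only runs once per device lifetime. A sketch of the full function; the clk error check is implied but its lines carry no dd, and the 0xE written to the CKEY field is, per the fragments above, a fixed unlock value the hardware expects before other AES_MR fields take effect:

static int atmel_aes_hw_init(struct atmel_aes_dev *dd)
{
	int err;

	err = clk_prepare_enable(dd->iclk);
	if (err)
		return err;

	if (!(dd->flags & AES_FLAGS_INIT)) {
		atmel_aes_write(dd, AES_CR, AES_CR_SWRST);	/* soft reset */
		/* CKEY must carry this value for AES_MR writes to stick */
		atmel_aes_write(dd, AES_MR, 0xE << AES_MR_CKEY_OFFSET);
		dd->flags |= AES_FLAGS_INIT;
		dd->err = 0;
	}

	return 0;
}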
279 static inline unsigned int atmel_aes_get_version(struct atmel_aes_dev *dd) in atmel_aes_get_version() argument
281 return atmel_aes_read(dd, AES_HW_VERSION) & 0x00000fff; in atmel_aes_get_version()
284 static void atmel_aes_hw_version_init(struct atmel_aes_dev *dd) in atmel_aes_hw_version_init() argument
286 atmel_aes_hw_init(dd); in atmel_aes_hw_version_init()
288 dd->hw_version = atmel_aes_get_version(dd); in atmel_aes_hw_version_init()
290 dev_info(dd->dev, in atmel_aes_hw_version_init()
291 "version: 0x%x\n", dd->hw_version); in atmel_aes_hw_version_init()
293 clk_disable_unprepare(dd->iclk); in atmel_aes_hw_version_init()
296 static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err) in atmel_aes_finish_req() argument
298 struct ablkcipher_request *req = dd->req; in atmel_aes_finish_req()
300 clk_disable_unprepare(dd->iclk); in atmel_aes_finish_req()
301 dd->flags &= ~AES_FLAGS_BUSY; in atmel_aes_finish_req()
308 struct atmel_aes_dev *dd = data; in atmel_aes_dma_callback() local
311 tasklet_schedule(&dd->done_task); in atmel_aes_dma_callback()
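finish_req drops the clock and the BUSY flag and completes the request, while the DMA callback only schedules the done tasklet so the real completion work runs outside dmaengine callback context. A sketch; the completion call is an assumption consistent with the ablkcipher API of this era:

static void atmel_aes_finish_req(struct atmel_aes_dev *dd, int err)
{
	struct ablkcipher_request *req = dd->req;

	clk_disable_unprepare(dd->iclk);
	dd->flags &= ~AES_FLAGS_BUSY;

	req->base.complete(&req->base, err);
}

static void atmel_aes_dma_callback(void *data)
{
	struct atmel_aes_dev *dd = data;

	/* out-channel completion: finish the request in tasklet context */
	tasklet_schedule(&dd->done_task);
}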
314 static int atmel_aes_crypt_dma(struct atmel_aes_dev *dd, in atmel_aes_crypt_dma() argument
320 dd->dma_size = length; in atmel_aes_crypt_dma()
322 dma_sync_single_for_device(dd->dev, dma_addr_in, length, in atmel_aes_crypt_dma()
324 dma_sync_single_for_device(dd->dev, dma_addr_out, length, in atmel_aes_crypt_dma()
327 if (dd->flags & AES_FLAGS_CFB8) { in atmel_aes_crypt_dma()
328 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
330 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
332 } else if (dd->flags & AES_FLAGS_CFB16) { in atmel_aes_crypt_dma()
333 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
335 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
338 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_crypt_dma()
340 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_crypt_dma()
344 if (dd->flags & (AES_FLAGS_CFB8 | AES_FLAGS_CFB16 | in atmel_aes_crypt_dma()
346 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_aes_crypt_dma()
347 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_aes_crypt_dma()
348 dd->dma_lch_out.dma_conf.src_maxburst = 1; in atmel_aes_crypt_dma()
349 dd->dma_lch_out.dma_conf.dst_maxburst = 1; in atmel_aes_crypt_dma()
351 dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
352 dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
353 dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
354 dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_crypt_dma()
357 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_aes_crypt_dma()
358 dmaengine_slave_config(dd->dma_lch_out.chan, &dd->dma_lch_out.dma_conf); in atmel_aes_crypt_dma()
360 dd->flags |= AES_FLAGS_DMA; in atmel_aes_crypt_dma()
370 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, &sg[0], in atmel_aes_crypt_dma()
376 out_desc = dmaengine_prep_slave_sg(dd->dma_lch_out.chan, &sg[1], in atmel_aes_crypt_dma()
383 out_desc->callback_param = dd; in atmel_aes_crypt_dma()
386 dma_async_issue_pending(dd->dma_lch_out.chan); in atmel_aes_crypt_dma()
389 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_aes_crypt_dma()
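
In atmel_aes_crypt_dma() the right-hand sides of the addr-width assignments fall on lines that never mention dd, so they are elided above. CFB8 and CFB16 shrink the DMA beat to the feedback size; everything else moves 32-bit words. A sketch of the elided values (the standard dmaengine constants are the only plausible completions) plus the descriptor hand-off, where only the out channel gets a callback because output completion implies the input stream has drained:

	if (dd->flags & AES_FLAGS_CFB8) {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_1_BYTE;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_1_BYTE;
	} else if (dd->flags & AES_FLAGS_CFB16) {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_2_BYTES;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_2_BYTES;
	} else {
		dd->dma_lch_in.dma_conf.dst_addr_width =
			DMA_SLAVE_BUSWIDTH_4_BYTES;
		dd->dma_lch_out.dma_conf.src_addr_width =
			DMA_SLAVE_BUSWIDTH_4_BYTES;
	}

	out_desc->callback = atmel_aes_dma_callback;
	out_desc->callback_param = dd;

	dmaengine_submit(out_desc);
	dma_async_issue_pending(dd->dma_lch_out.chan);

	dmaengine_submit(in_desc);
	dma_async_issue_pending(dd->dma_lch_in.chan);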
394 static int atmel_aes_crypt_cpu_start(struct atmel_aes_dev *dd) in atmel_aes_crypt_cpu_start() argument
396 dd->flags &= ~AES_FLAGS_DMA; in atmel_aes_crypt_cpu_start()
398 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in, in atmel_aes_crypt_cpu_start()
399 dd->dma_size, DMA_TO_DEVICE); in atmel_aes_crypt_cpu_start()
400 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_out, in atmel_aes_crypt_cpu_start()
401 dd->dma_size, DMA_FROM_DEVICE); in atmel_aes_crypt_cpu_start()
404 dd->nb_in_sg = atmel_aes_sg_length(dd->req, dd->in_sg); in atmel_aes_crypt_cpu_start()
405 if (!dd->nb_in_sg) in atmel_aes_crypt_cpu_start()
408 dd->nb_out_sg = atmel_aes_sg_length(dd->req, dd->out_sg); in atmel_aes_crypt_cpu_start()
409 if (!dd->nb_out_sg) in atmel_aes_crypt_cpu_start()
412 dd->bufcnt = sg_copy_to_buffer(dd->in_sg, dd->nb_in_sg, in atmel_aes_crypt_cpu_start()
413 dd->buf_in, dd->total); in atmel_aes_crypt_cpu_start()
415 if (!dd->bufcnt) in atmel_aes_crypt_cpu_start()
418 dd->total -= dd->bufcnt; in atmel_aes_crypt_cpu_start()
420 atmel_aes_write(dd, AES_IER, AES_INT_DATARDY); in atmel_aes_crypt_cpu_start()
421 atmel_aes_write_n(dd, AES_IDATAR(0), (u32 *) dd->buf_in, in atmel_aes_crypt_cpu_start()
422 dd->bufcnt >> 2); in atmel_aes_crypt_cpu_start()
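
The CPU (PIO) path arms the DATRDY interrupt and then writes dd->bufcnt >> 2 words (bufcnt is a byte count) into the input data registers; completion arrives via the interrupt handler, which never shows up in this dd listing because it names its pointer aes_dd instead. A hedged sketch of that handler, assuming AES_ISR/AES_IMR/AES_IDR status, mask, and disable register names in the driver's usual style:

static irqreturn_t atmel_aes_irq(int irq, void *dev_id)
{
	struct atmel_aes_dev *aes_dd = dev_id;
	u32 reg;

	reg = atmel_aes_read(aes_dd, AES_ISR);
	if (reg & atmel_aes_read(aes_dd, AES_IMR)) {
		atmel_aes_write(aes_dd, AES_IDR, reg);	/* mask handled irqs */
		if (aes_dd->flags & AES_FLAGS_BUSY)
			tasklet_schedule(&aes_dd->done_task);
		else
			dev_warn(aes_dd->dev,
				 "AES interrupt with no active request\n");
		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}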
427 static int atmel_aes_crypt_dma_start(struct atmel_aes_dev *dd) in atmel_aes_crypt_dma_start() argument
433 if ((!dd->in_offset) && (!dd->out_offset)) { in atmel_aes_crypt_dma_start()
435 in = IS_ALIGNED((u32)dd->in_sg->offset, sizeof(u32)) && in atmel_aes_crypt_dma_start()
436 IS_ALIGNED(dd->in_sg->length, dd->ctx->block_size); in atmel_aes_crypt_dma_start()
437 out = IS_ALIGNED((u32)dd->out_sg->offset, sizeof(u32)) && in atmel_aes_crypt_dma_start()
438 IS_ALIGNED(dd->out_sg->length, dd->ctx->block_size); in atmel_aes_crypt_dma_start()
441 if (sg_dma_len(dd->in_sg) != sg_dma_len(dd->out_sg)) in atmel_aes_crypt_dma_start()
447 count = min(dd->total, sg_dma_len(dd->in_sg)); in atmel_aes_crypt_dma_start()
448 count = min(count, sg_dma_len(dd->out_sg)); in atmel_aes_crypt_dma_start()
450 err = dma_map_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
452 dev_err(dd->dev, "dma_map_sg() error\n"); in atmel_aes_crypt_dma_start()
456 err = dma_map_sg(dd->dev, dd->out_sg, 1, in atmel_aes_crypt_dma_start()
459 dev_err(dd->dev, "dma_map_sg() error\n"); in atmel_aes_crypt_dma_start()
460 dma_unmap_sg(dd->dev, dd->in_sg, 1, in atmel_aes_crypt_dma_start()
465 addr_in = sg_dma_address(dd->in_sg); in atmel_aes_crypt_dma_start()
466 addr_out = sg_dma_address(dd->out_sg); in atmel_aes_crypt_dma_start()
468 dd->flags |= AES_FLAGS_FAST; in atmel_aes_crypt_dma_start()
471 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_in, in atmel_aes_crypt_dma_start()
472 dd->dma_size, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
475 count = atmel_aes_sg_copy(&dd->in_sg, &dd->in_offset, in atmel_aes_crypt_dma_start()
476 dd->buf_in, dd->buflen, dd->total, 0); in atmel_aes_crypt_dma_start()
478 addr_in = dd->dma_addr_in; in atmel_aes_crypt_dma_start()
479 addr_out = dd->dma_addr_out; in atmel_aes_crypt_dma_start()
481 dd->flags &= ~AES_FLAGS_FAST; in atmel_aes_crypt_dma_start()
484 dd->total -= count; in atmel_aes_crypt_dma_start()
486 err = atmel_aes_crypt_dma(dd, addr_in, addr_out, count); in atmel_aes_crypt_dma_start()
488 if (err && (dd->flags & AES_FLAGS_FAST)) { in atmel_aes_crypt_dma_start()
489 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
490 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_start()
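
atmel_aes_crypt_dma_start() picks between a zero-copy fast path (map the caller's scatterlists directly) and the bounce buffers. The alignment tests above feed a local flag, sketched here with an assumed name:

	bool fast = false;	/* assumed local: zero-copy eligibility */

	if (!dd->in_offset && !dd->out_offset) {
		/* both sg entries word-aligned, whole cipher blocks,
		 * and equal in/out lengths */
		fast = in && out &&
		       sg_dma_len(dd->in_sg) == sg_dma_len(dd->out_sg);
	}

dma_map_sg() returns the number of entries mapped, zero on failure, which is what the error branches above test. On failure after a successful start, both directions are unmapped so the fast path never leaks mappings; on the slow path the input is staged through buf_in with atmel_aes_sg_copy() instead.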
496 static int atmel_aes_write_ctrl(struct atmel_aes_dev *dd) in atmel_aes_write_ctrl() argument
501 err = atmel_aes_hw_init(dd); in atmel_aes_write_ctrl()
507 if (dd->ctx->keylen == AES_KEYSIZE_128) in atmel_aes_write_ctrl()
509 else if (dd->ctx->keylen == AES_KEYSIZE_192) in atmel_aes_write_ctrl()
514 if (dd->flags & AES_FLAGS_CBC) { in atmel_aes_write_ctrl()
516 } else if (dd->flags & AES_FLAGS_CFB) { in atmel_aes_write_ctrl()
518 if (dd->flags & AES_FLAGS_CFB8) in atmel_aes_write_ctrl()
520 else if (dd->flags & AES_FLAGS_CFB16) in atmel_aes_write_ctrl()
522 else if (dd->flags & AES_FLAGS_CFB32) in atmel_aes_write_ctrl()
524 else if (dd->flags & AES_FLAGS_CFB64) in atmel_aes_write_ctrl()
526 else if (dd->flags & AES_FLAGS_CFB128) in atmel_aes_write_ctrl()
528 } else if (dd->flags & AES_FLAGS_OFB) { in atmel_aes_write_ctrl()
530 } else if (dd->flags & AES_FLAGS_CTR) { in atmel_aes_write_ctrl()
536 if (dd->flags & AES_FLAGS_ENCRYPT) in atmel_aes_write_ctrl()
539 if (dd->total > ATMEL_AES_DMA_THRESHOLD) { in atmel_aes_write_ctrl()
541 if (dd->caps.has_dualbuff) in atmel_aes_write_ctrl()
547 atmel_aes_write(dd, AES_CR, valcr); in atmel_aes_write_ctrl()
548 atmel_aes_write(dd, AES_MR, valmr); in atmel_aes_write_ctrl()
550 atmel_aes_write_n(dd, AES_KEYWR(0), dd->ctx->key, in atmel_aes_write_ctrl()
551 dd->ctx->keylen >> 2); in atmel_aes_write_ctrl()
553 if (((dd->flags & AES_FLAGS_CBC) || (dd->flags & AES_FLAGS_CFB) || in atmel_aes_write_ctrl()
554 (dd->flags & AES_FLAGS_OFB) || (dd->flags & AES_FLAGS_CTR)) && in atmel_aes_write_ctrl()
555 dd->req->info) { in atmel_aes_write_ctrl()
556 atmel_aes_write_n(dd, AES_IVR(0), dd->req->info, 4); in atmel_aes_write_ctrl()
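
write_ctrl reinitializes the hardware, composes AES_MR from key size, chaining mode, direction, and start mode (DMA-triggered vs. CPU-driven), then loads the key and, for IV-bearing modes, the IV. The branch bodies set bits on lines without dd, so they are missing above; a condensed sketch, assuming valmr was initialized earlier and macro names from the driver's register header:

	if (dd->ctx->keylen == AES_KEYSIZE_128)
		valmr |= AES_MR_KEYSIZE_128;
	else if (dd->ctx->keylen == AES_KEYSIZE_192)
		valmr |= AES_MR_KEYSIZE_192;
	else
		valmr |= AES_MR_KEYSIZE_256;

	if (dd->flags & AES_FLAGS_ENCRYPT)
		valmr |= AES_MR_CYPHER_ENC;

	if (dd->total > ATMEL_AES_DMA_THRESHOLD) {
		/* DMA writes to IDATAR0 kick off each round */
		valmr |= AES_MR_SMOD_IDATAR0;
		if (dd->caps.has_dualbuff)
			valmr |= AES_MR_DUALBUFF;
	} else {
		valmr |= AES_MR_SMOD_AUTO;
	}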
562 static int atmel_aes_handle_queue(struct atmel_aes_dev *dd, in atmel_aes_handle_queue() argument
571 spin_lock_irqsave(&dd->lock, flags); in atmel_aes_handle_queue()
573 ret = ablkcipher_enqueue_request(&dd->queue, req); in atmel_aes_handle_queue()
574 if (dd->flags & AES_FLAGS_BUSY) { in atmel_aes_handle_queue()
575 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
578 backlog = crypto_get_backlog(&dd->queue); in atmel_aes_handle_queue()
579 async_req = crypto_dequeue_request(&dd->queue); in atmel_aes_handle_queue()
581 dd->flags |= AES_FLAGS_BUSY; in atmel_aes_handle_queue()
582 spin_unlock_irqrestore(&dd->lock, flags); in atmel_aes_handle_queue()
593 dd->req = req; in atmel_aes_handle_queue()
594 dd->total = req->nbytes; in atmel_aes_handle_queue()
595 dd->in_offset = 0; in atmel_aes_handle_queue()
596 dd->in_sg = req->src; in atmel_aes_handle_queue()
597 dd->out_offset = 0; in atmel_aes_handle_queue()
598 dd->out_sg = req->dst; in atmel_aes_handle_queue()
603 dd->flags = (dd->flags & ~AES_FLAGS_MODE_MASK) | rctx->mode; in atmel_aes_handle_queue()
604 dd->ctx = ctx; in atmel_aes_handle_queue()
605 ctx->dd = dd; in atmel_aes_handle_queue()
607 err = atmel_aes_write_ctrl(dd); in atmel_aes_handle_queue()
609 if (dd->total > ATMEL_AES_DMA_THRESHOLD) in atmel_aes_handle_queue()
610 err = atmel_aes_crypt_dma_start(dd); in atmel_aes_handle_queue()
612 err = atmel_aes_crypt_cpu_start(dd); in atmel_aes_handle_queue()
616 atmel_aes_finish_req(dd, err); in atmel_aes_handle_queue()
617 tasklet_schedule(&dd->queue_task); in atmel_aes_handle_queue()
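
handle_queue is the stock crypto-queue dispatch pattern: enqueue under the lock, bail if the engine is busy, otherwise dequeue, mark busy, and complete any backlogged request with -EINPROGRESS. Note that req may be NULL, which is how the queue tasklet further down re-pumps pending work. A sketch of the skeleton implied by the fragments:

static int atmel_aes_handle_queue(struct atmel_aes_dev *dd,
				  struct ablkcipher_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (req)	/* NULL just pumps the queue (see the tasklet below) */
		ret = ablkcipher_enqueue_request(&dd->queue, req);
	if (dd->flags & AES_FLAGS_BUSY) {
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&dd->queue);
	async_req = crypto_dequeue_request(&dd->queue);
	if (async_req)
		dd->flags |= AES_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!async_req)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	/* ...then populate dd->req/total/sg state and start, as above */
	return ret;
}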
623 static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd) in atmel_aes_crypt_dma_stop() argument
628 if (dd->flags & AES_FLAGS_DMA) { in atmel_aes_crypt_dma_stop()
630 if (dd->flags & AES_FLAGS_FAST) { in atmel_aes_crypt_dma_stop()
631 dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE); in atmel_aes_crypt_dma_stop()
632 dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE); in atmel_aes_crypt_dma_stop()
634 dma_sync_single_for_cpu(dd->dev, dd->dma_addr_out, in atmel_aes_crypt_dma_stop()
635 dd->dma_size, DMA_FROM_DEVICE); in atmel_aes_crypt_dma_stop()
638 count = atmel_aes_sg_copy(&dd->out_sg, &dd->out_offset, in atmel_aes_crypt_dma_stop()
639 dd->buf_out, dd->buflen, dd->dma_size, 1); in atmel_aes_crypt_dma_stop()
640 if (count != dd->dma_size) { in atmel_aes_crypt_dma_stop()
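
crypt_dma_stop mirrors the two start paths: fast path just releases the sg mappings, slow path syncs the bounce buffer for the CPU and copies it back into out_sg, treating a short copy as an error. A sketch; the -EINVAL is an assumption:

static int atmel_aes_crypt_dma_stop(struct atmel_aes_dev *dd)
{
	int err = 0;
	size_t count;

	if (dd->flags & AES_FLAGS_DMA) {
		if (dd->flags & AES_FLAGS_FAST) {
			dma_unmap_sg(dd->dev, dd->out_sg, 1, DMA_FROM_DEVICE);
			dma_unmap_sg(dd->dev, dd->in_sg, 1, DMA_TO_DEVICE);
		} else {
			dma_sync_single_for_cpu(dd->dev, dd->dma_addr_out,
						dd->dma_size, DMA_FROM_DEVICE);

			/* copy the bounce buffer back into the caller's sg */
			count = atmel_aes_sg_copy(&dd->out_sg, &dd->out_offset,
						  dd->buf_out, dd->buflen,
						  dd->dma_size, 1);
			if (count != dd->dma_size)
				err = -EINVAL;	/* short copy-back */
		}
	}

	return err;
}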
651 static int atmel_aes_buff_init(struct atmel_aes_dev *dd) in atmel_aes_buff_init() argument
655 dd->buf_in = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_aes_buff_init()
656 dd->buf_out = (void *)__get_free_pages(GFP_KERNEL, 0); in atmel_aes_buff_init()
657 dd->buflen = PAGE_SIZE; in atmel_aes_buff_init()
658 dd->buflen &= ~(AES_BLOCK_SIZE - 1); in atmel_aes_buff_init()
660 if (!dd->buf_in || !dd->buf_out) { in atmel_aes_buff_init()
661 dev_err(dd->dev, "unable to alloc pages.\n"); in atmel_aes_buff_init()
666 dd->dma_addr_in = dma_map_single(dd->dev, dd->buf_in, in atmel_aes_buff_init()
667 dd->buflen, DMA_TO_DEVICE); in atmel_aes_buff_init()
668 if (dma_mapping_error(dd->dev, dd->dma_addr_in)) { in atmel_aes_buff_init()
669 dev_err(dd->dev, "dma %d bytes error\n", dd->buflen); in atmel_aes_buff_init()
674 dd->dma_addr_out = dma_map_single(dd->dev, dd->buf_out, in atmel_aes_buff_init()
675 dd->buflen, DMA_FROM_DEVICE); in atmel_aes_buff_init()
676 if (dma_mapping_error(dd->dev, dd->dma_addr_out)) { in atmel_aes_buff_init()
677 dev_err(dd->dev, "dma %d bytes error\n", dd->buflen); in atmel_aes_buff_init()
685 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_aes_buff_init()
689 free_page((unsigned long)dd->buf_out); in atmel_aes_buff_init()
690 free_page((unsigned long)dd->buf_in); in atmel_aes_buff_init()
696 static void atmel_aes_buff_cleanup(struct atmel_aes_dev *dd) in atmel_aes_buff_cleanup() argument
698 dma_unmap_single(dd->dev, dd->dma_addr_out, dd->buflen, in atmel_aes_buff_cleanup()
700 dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen, in atmel_aes_buff_cleanup()
702 free_page((unsigned long)dd->buf_out); in atmel_aes_buff_cleanup()
703 free_page((unsigned long)dd->buf_in); in atmel_aes_buff_cleanup()
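
buff_init takes one page per direction and rounds buflen down to a whole number of AES blocks, a no-op for power-of-two page sizes but it keeps the invariant explicit. Its error unwinding releases resources in reverse order of acquisition, which atmel_aes_buff_cleanup() then repeats for the success case. A sketch of the unwind, with assumed label names:

err_map_out:
	dma_unmap_single(dd->dev, dd->dma_addr_in, dd->buflen,
			 DMA_TO_DEVICE);
err_map_in:
err_alloc:
	free_page((unsigned long)dd->buf_out);
	free_page((unsigned long)dd->buf_in);
	if (err)
		pr_err("error: %d\n", err);
	return err;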
711 struct atmel_aes_dev *dd; in atmel_aes_crypt() local
745 dd = atmel_aes_find_dev(ctx); in atmel_aes_crypt()
746 if (!dd) in atmel_aes_crypt()
751 return atmel_aes_handle_queue(dd, req); in atmel_aes_crypt()
766 static int atmel_aes_dma_init(struct atmel_aes_dev *dd, in atmel_aes_dma_init() argument
776 dd->dma_lch_in.chan = dma_request_slave_channel_compat(mask, in atmel_aes_dma_init()
777 atmel_aes_filter, &pdata->dma_slave->rxdata, dd->dev, "tx"); in atmel_aes_dma_init()
778 if (!dd->dma_lch_in.chan) in atmel_aes_dma_init()
781 dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV; in atmel_aes_dma_init()
782 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_aes_dma_init()
784 dd->dma_lch_in.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
785 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_aes_dma_init()
787 dd->dma_lch_in.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
788 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_aes_dma_init()
790 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_aes_dma_init()
792 dd->dma_lch_out.chan = dma_request_slave_channel_compat(mask, in atmel_aes_dma_init()
793 atmel_aes_filter, &pdata->dma_slave->txdata, dd->dev, "rx"); in atmel_aes_dma_init()
794 if (!dd->dma_lch_out.chan) in atmel_aes_dma_init()
797 dd->dma_lch_out.dma_conf.direction = DMA_DEV_TO_MEM; in atmel_aes_dma_init()
798 dd->dma_lch_out.dma_conf.src_addr = dd->phys_base + in atmel_aes_dma_init()
800 dd->dma_lch_out.dma_conf.src_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
801 dd->dma_lch_out.dma_conf.src_addr_width = in atmel_aes_dma_init()
803 dd->dma_lch_out.dma_conf.dst_maxburst = dd->caps.max_burst_size; in atmel_aes_dma_init()
804 dd->dma_lch_out.dma_conf.dst_addr_width = in atmel_aes_dma_init()
806 dd->dma_lch_out.dma_conf.device_fc = false; in atmel_aes_dma_init()
811 dma_release_channel(dd->dma_lch_in.chan); in atmel_aes_dma_init()
813 dev_warn(dd->dev, "no DMA channel available\n"); in atmel_aes_dma_init()
817 static void atmel_aes_dma_cleanup(struct atmel_aes_dev *dd) in atmel_aes_dma_cleanup() argument
819 dma_release_channel(dd->dma_lch_in.chan); in atmel_aes_dma_cleanup()
820 dma_release_channel(dd->dma_lch_out.chan); in atmel_aes_dma_cleanup()
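
The truncated dst_addr/src_addr assignments in dma_init complete naturally with the data-register offsets used elsewhere in this listing (an assumption, but the only plausible targets): the mem-to-dev channel pushes into the input FIFO and the dev-to-mem channel drains the output FIFO. The maxburst values set here are defaults; atmel_aes_crypt_dma() overrides them per request, as seen above.

	dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV;
	dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base +
		AES_IDATAR(0);		/* input data registers */

	dd->dma_lch_out.dma_conf.direction = DMA_DEV_TO_MEM;
	dd->dma_lch_out.dma_conf.src_addr = dd->phys_base +
		AES_ODATAR(0);		/* output data registers */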
1153 struct atmel_aes_dev *dd = (struct atmel_aes_dev *)data; in atmel_aes_queue_task() local
1155 atmel_aes_handle_queue(dd, NULL); in atmel_aes_queue_task()
1160 struct atmel_aes_dev *dd = (struct atmel_aes_dev *) data; in atmel_aes_done_task() local
1163 if (!(dd->flags & AES_FLAGS_DMA)) { in atmel_aes_done_task()
1164 atmel_aes_read_n(dd, AES_ODATAR(0), (u32 *) dd->buf_out, in atmel_aes_done_task()
1165 dd->bufcnt >> 2); in atmel_aes_done_task()
1167 if (sg_copy_from_buffer(dd->out_sg, dd->nb_out_sg, in atmel_aes_done_task()
1168 dd->buf_out, dd->bufcnt)) in atmel_aes_done_task()
1176 err = atmel_aes_crypt_dma_stop(dd); in atmel_aes_done_task()
1178 err = dd->err ? : err; in atmel_aes_done_task()
1180 if (dd->total && !err) { in atmel_aes_done_task()
1181 if (dd->flags & AES_FLAGS_FAST) { in atmel_aes_done_task()
1182 dd->in_sg = sg_next(dd->in_sg); in atmel_aes_done_task()
1183 dd->out_sg = sg_next(dd->out_sg); in atmel_aes_done_task()
1184 if (!dd->in_sg || !dd->out_sg) in atmel_aes_done_task()
1188 err = atmel_aes_crypt_dma_start(dd); in atmel_aes_done_task()
1194 atmel_aes_finish_req(dd, err); in atmel_aes_done_task()
1195 atmel_aes_handle_queue(dd, NULL); in atmel_aes_done_task()
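
done_task finishes both transfer flavors (PIO: drain ODATAR and copy back to out_sg; DMA: crypt_dma_stop) and then decides whether to chain another round. A sketch of the continuation logic implied by the fragments; the -EINVAL is assumed:

	if (dd->total && !err) {
		if (dd->flags & AES_FLAGS_FAST) {
			/* zero-copy path: advance to the next sg entries */
			dd->in_sg = sg_next(dd->in_sg);
			dd->out_sg = sg_next(dd->out_sg);
			if (!dd->in_sg || !dd->out_sg)
				err = -EINVAL;
		}
		if (!err)
			err = atmel_aes_crypt_dma_start(dd);
		if (!err)
			return;		/* next DMA round started */
	}

	atmel_aes_finish_req(dd, err);
	atmel_aes_handle_queue(dd, NULL);	/* pump the next request */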
1216 static void atmel_aes_unregister_algs(struct atmel_aes_dev *dd) in atmel_aes_unregister_algs() argument
1222 if (dd->caps.has_cfb64) in atmel_aes_unregister_algs()
1226 static int atmel_aes_register_algs(struct atmel_aes_dev *dd) in atmel_aes_register_algs() argument
1236 if (dd->caps.has_cfb64) { in atmel_aes_register_algs()
1253 static void atmel_aes_get_cap(struct atmel_aes_dev *dd) in atmel_aes_get_cap() argument
1255 dd->caps.has_dualbuff = 0; in atmel_aes_get_cap()
1256 dd->caps.has_cfb64 = 0; in atmel_aes_get_cap()
1257 dd->caps.max_burst_size = 1; in atmel_aes_get_cap()
1260 switch (dd->hw_version & 0xff0) { in atmel_aes_get_cap()
1262 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
1263 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
1264 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
1267 dd->caps.has_dualbuff = 1; in atmel_aes_get_cap()
1268 dd->caps.has_cfb64 = 1; in atmel_aes_get_cap()
1269 dd->caps.max_burst_size = 4; in atmel_aes_get_cap()
1274 dev_warn(dd->dev, in atmel_aes_get_cap()
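
get_cap starts from the most conservative capabilities and upgrades them per IP revision; masking hw_version with 0xff0 drops the patch nibble. The case labels carry no dd and are elided above, so the revision values below are assumptions, modeled on the two identical upgrade blocks visible at source lines 1262-1269:

	switch (dd->hw_version & 0xff0) {
	case 0x200:	/* assumed revision values */
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x130:
		dd->caps.has_dualbuff = 1;
		dd->caps.has_cfb64 = 1;
		dd->caps.max_burst_size = 4;
		break;
	case 0x120:
		break;	/* baseline: keep minimum capabilities */
	default:
		dev_warn(dd->dev,
			 "Unmanaged aes version, set minimum capabilities\n");
		break;
	}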