Lines matching refs:dd (uses of the 'dd' device handle in the Atmel SHA driver)
81 struct atmel_sha_dev *dd; member
102 struct atmel_sha_dev *dd; member
147 static inline u32 atmel_sha_read(struct atmel_sha_dev *dd, u32 offset) in atmel_sha_read() argument
149 return readl_relaxed(dd->io_base + offset); in atmel_sha_read()
152 static inline void atmel_sha_write(struct atmel_sha_dev *dd, in atmel_sha_write() argument
155 writel_relaxed(value, dd->io_base + offset); in atmel_sha_write()
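
The two helpers above funnel every SHA register access through a single pair of relaxed MMIO accessors, keeping the io_base arithmetic in one place. A minimal self-contained sketch of the same pattern; the SHA_CR offset and SHA_CR_SWRST bit value here are illustrative placeholders, not the real Atmel definitions:

#include <linux/io.h>
#include <linux/types.h>

#define SHA_CR		0x00		/* assumed control register offset */
#define SHA_CR_SWRST	(1 << 0)	/* assumed soft-reset bit */

struct sha_dev_sketch {
	void __iomem *io_base;		/* ioremap()ed register window */
};

static inline u32 sha_read(struct sha_dev_sketch *dd, u32 offset)
{
	return readl_relaxed(dd->io_base + offset);
}

static inline void sha_write(struct sha_dev_sketch *dd, u32 offset, u32 value)
{
	writel_relaxed(value, dd->io_base + offset);
}

/* example use: soft-reset the engine, as hw_init does further down */
static void sha_reset(struct sha_dev_sketch *dd)
{
	sha_write(dd, SHA_CR, SHA_CR_SWRST);
}
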
260 struct atmel_sha_dev *dd = NULL; in atmel_sha_init() local
264 if (!tctx->dd) { in atmel_sha_init()
266 dd = tmp; in atmel_sha_init()
269 tctx->dd = dd; in atmel_sha_init()
271 dd = tctx->dd; in atmel_sha_init()
276 ctx->dd = dd; in atmel_sha_init()
280 dev_dbg(dd->dev, "init: digest size: %d\n", in atmel_sha_init()
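
atmel_sha_init binds a transform context to a device lazily: the first request walks the driver's device list and caches the chosen atmel_sha_dev in tctx->dd; every later request reuses that binding. A sketch of the selection logic under stated assumptions (the atmel_sha.lock spinlock, atmel_sha.dev_list, and the per-device list member are assumed driver-global bookkeeping, following the pattern of the other Atmel crypto drivers):

/* assumed driver-global device registry */
static struct atmel_sha_drv {
	struct list_head dev_list;
	spinlock_t lock;
} atmel_sha = {
	.dev_list = LIST_HEAD_INIT(atmel_sha.dev_list),
	.lock = __SPIN_LOCK_UNLOCKED(atmel_sha.lock),
};

static struct atmel_sha_dev *atmel_sha_find_dev(struct atmel_sha_ctx *tctx)
{
	struct atmel_sha_dev *dd = NULL;
	struct atmel_sha_dev *tmp;

	spin_lock_bh(&atmel_sha.lock);
	if (!tctx->dd) {
		/* first use: pick any registered device */
		list_for_each_entry(tmp, &atmel_sha.dev_list, list) {
			dd = tmp;
			break;
		}
		tctx->dd = dd;
	} else {
		/* later uses: stay on the device chosen first */
		dd = tctx->dd;
	}
	spin_unlock_bh(&atmel_sha.lock);

	return dd;
}
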
317 static void atmel_sha_write_ctrl(struct atmel_sha_dev *dd, int dma) in atmel_sha_write_ctrl() argument
319 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_write_ctrl()
323 if (!dd->caps.has_dma) in atmel_sha_write_ctrl()
324 atmel_sha_write(dd, SHA_IER, SHA_INT_TXBUFE); in atmel_sha_write_ctrl()
326 if (dd->caps.has_dualbuff) in atmel_sha_write_ctrl()
329 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_write_ctrl()
347 atmel_sha_write(dd, SHA_CR, valcr); in atmel_sha_write_ctrl()
348 atmel_sha_write(dd, SHA_MR, valmr); in atmel_sha_write_ctrl()
351 static int atmel_sha_xmit_cpu(struct atmel_sha_dev *dd, const u8 *buf, in atmel_sha_xmit_cpu() argument
354 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_cpu()
358 dev_dbg(dd->dev, "xmit_cpu: digcnt: 0x%llx 0x%llx, length: %d, final: %d\n", in atmel_sha_xmit_cpu()
361 atmel_sha_write_ctrl(dd, 0); in atmel_sha_xmit_cpu()
369 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_cpu()
373 dd->flags |= SHA_FLAGS_CPU; in atmel_sha_xmit_cpu()
376 atmel_sha_write(dd, SHA_REG_DIN(count), buffer[count]); in atmel_sha_xmit_cpu()
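
atmel_sha_xmit_cpu is the PIO path: program the mode registers via atmel_sha_write_ctrl(), set SHA_FLAGS_FINAL on the last block so the interrupt handler knows to stop, then feed the buffer into the SHA_REG_DIN() data-input registers one 32-bit word at a time. The inner loop, isolated as a sketch (length is assumed to be a multiple of the 4-byte word size, which the hash block size guarantees):

/* PIO feed loop; SHA_REG_DIN() is the driver's data-input macro */
static void sha_fill_din(struct atmel_sha_dev *dd,
			 const u8 *buf, size_t length)
{
	const u32 *buffer = (const u32 *)buf;
	size_t count, len32 = DIV_ROUND_UP(length, sizeof(u32));

	for (count = 0; count < len32; count++)
		atmel_sha_write(dd, SHA_REG_DIN(count), buffer[count]);
}
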
381 static int atmel_sha_xmit_pdc(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_pdc() argument
384 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_pdc()
387 dev_dbg(dd->dev, "xmit_pdc: digcnt: 0x%llx 0x%llx, length: %d, final: %d\n", in atmel_sha_xmit_pdc()
391 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTDIS); in atmel_sha_xmit_pdc()
392 atmel_sha_write(dd, SHA_TPR, dma_addr1); in atmel_sha_xmit_pdc()
393 atmel_sha_write(dd, SHA_TCR, len32); in atmel_sha_xmit_pdc()
396 atmel_sha_write(dd, SHA_TNPR, dma_addr2); in atmel_sha_xmit_pdc()
397 atmel_sha_write(dd, SHA_TNCR, len32); in atmel_sha_xmit_pdc()
399 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_pdc()
407 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_pdc()
409 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_pdc()
412 atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTEN); in atmel_sha_xmit_pdc()
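
The PDC path programs the peripheral DMA controller in a fixed order: disable transmit, load the current buffer into the pointer/counter pair (SHA_TPR/SHA_TCR), optionally load a second buffer into the "next" pair (SHA_TNPR/SHA_TNCR) for double buffering, write the mode registers, and only then enable transmit, which starts the transfer. A condensed sketch of that sequence (lengths in 32-bit words, as the TCR/TNCR writes above suggest):

static void sha_pdc_start(struct atmel_sha_dev *dd,
			  dma_addr_t addr1, u32 words1,
			  dma_addr_t addr2, u32 words2)
{
	atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTDIS);	/* stop TX first */
	atmel_sha_write(dd, SHA_TPR, addr1);
	atmel_sha_write(dd, SHA_TCR, words1);
	if (words2) {
		/* chained second buffer, consumed when the first drains */
		atmel_sha_write(dd, SHA_TNPR, addr2);
		atmel_sha_write(dd, SHA_TNCR, words2);
	}
	atmel_sha_write(dd, SHA_PTCR, SHA_PTCR_TXTEN);	/* go */
}
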
419 struct atmel_sha_dev *dd = data; in atmel_sha_dma_callback() local
422 atmel_sha_write(dd, SHA_IER, SHA_INT_DATARDY); in atmel_sha_dma_callback()
425 static int atmel_sha_xmit_dma(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_dma() argument
428 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_xmit_dma()
432 dev_dbg(dd->dev, "xmit_dma: digcnt: 0x%llx 0x%llx, length: %d, final: %d\n", in atmel_sha_xmit_dma()
435 dd->dma_lch_in.dma_conf.src_maxburst = 16; in atmel_sha_xmit_dma()
436 dd->dma_lch_in.dma_conf.dst_maxburst = 16; in atmel_sha_xmit_dma()
438 dmaengine_slave_config(dd->dma_lch_in.chan, &dd->dma_lch_in.dma_conf); in atmel_sha_xmit_dma()
446 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 2, in atmel_sha_xmit_dma()
452 in_desc = dmaengine_prep_slave_sg(dd->dma_lch_in.chan, sg, 1, in atmel_sha_xmit_dma()
459 in_desc->callback_param = dd; in atmel_sha_xmit_dma()
461 atmel_sha_write_ctrl(dd, 1); in atmel_sha_xmit_dma()
469 dd->flags |= SHA_FLAGS_FINAL; /* catch last interrupt */ in atmel_sha_xmit_dma()
471 dd->flags |= SHA_FLAGS_DMA_ACTIVE; in atmel_sha_xmit_dma()
475 dma_async_issue_pending(dd->dma_lch_in.chan); in atmel_sha_xmit_dma()
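
atmel_sha_xmit_dma follows the standard dmaengine slave idiom: raise the burst size, push the config with dmaengine_slave_config(), prepare a memory-to-device descriptor over one or two scatterlist entries, attach a completion callback, submit, and kick the channel. A generic sketch of that sequence, with the error handling the fragments above elide:

#include <linux/dmaengine.h>

static int sha_dma_submit(struct dma_chan *chan,
			  struct dma_slave_config *conf,
			  struct scatterlist *sg, unsigned int nents,
			  dma_async_tx_callback done, void *param)
{
	struct dma_async_tx_descriptor *desc;
	dma_cookie_t cookie;
	int err;

	conf->src_maxburst = 16;
	conf->dst_maxburst = 16;
	err = dmaengine_slave_config(chan, conf);
	if (err)
		return err;

	desc = dmaengine_prep_slave_sg(chan, sg, nents, DMA_MEM_TO_DEV,
				       DMA_PREP_INTERRUPT | DMA_CTRL_ACK);
	if (!desc)
		return -EINVAL;

	desc->callback = done;
	desc->callback_param = param;

	cookie = dmaengine_submit(desc);
	if (dma_submit_error(cookie))
		return -EIO;

	dma_async_issue_pending(chan);	/* actually start the transfer */
	return 0;
}
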
480 static int atmel_sha_xmit_start(struct atmel_sha_dev *dd, dma_addr_t dma_addr1, in atmel_sha_xmit_start() argument
483 if (dd->caps.has_dma) in atmel_sha_xmit_start()
484 return atmel_sha_xmit_dma(dd, dma_addr1, length1, in atmel_sha_xmit_start()
487 return atmel_sha_xmit_pdc(dd, dma_addr1, length1, in atmel_sha_xmit_start()
491 static int atmel_sha_update_cpu(struct atmel_sha_dev *dd) in atmel_sha_update_cpu() argument
493 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_cpu()
501 return atmel_sha_xmit_cpu(dd, ctx->buffer, bufcnt, 1); in atmel_sha_update_cpu()
504 static int atmel_sha_xmit_dma_map(struct atmel_sha_dev *dd, in atmel_sha_xmit_dma_map() argument
508 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_xmit_dma_map()
510 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_xmit_dma_map()
511 dev_err(dd->dev, "dma %u bytes error\n", ctx->buflen + in atmel_sha_xmit_dma_map()
519 return atmel_sha_xmit_start(dd, ctx->dma_addr, length, 0, 0, final); in atmel_sha_xmit_dma_map()
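
atmel_sha_xmit_dma_map stages data through the context's bounce buffer, and the mapping must be validated with dma_mapping_error() — on failure dma_map_single() returns a poisoned address, not NULL. The check-and-bail pattern in isolation (ctx->block_size completing the truncated length expression is an assumption based on the usual bounce-buffer sizing):

ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer,
			       ctx->buflen + ctx->block_size,
			       DMA_TO_DEVICE);
if (dma_mapping_error(dd->dev, ctx->dma_addr)) {
	dev_err(dd->dev, "dma %u bytes error\n",
		ctx->buflen + ctx->block_size);
	return -EINVAL;
}
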
522 static int atmel_sha_update_dma_slow(struct atmel_sha_dev *dd) in atmel_sha_update_dma_slow() argument
524 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_slow()
532 dev_dbg(dd->dev, "slow: bufcnt: %u, digcnt: 0x%llx 0x%llx, final: %d\n", in atmel_sha_update_dma_slow()
541 return atmel_sha_xmit_dma_map(dd, ctx, count, final); in atmel_sha_update_dma_slow()
547 static int atmel_sha_update_dma_start(struct atmel_sha_dev *dd) in atmel_sha_update_dma_start() argument
549 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_start()
558 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
560 dev_dbg(dd->dev, "fast: digcnt: 0x%llx 0x%llx, bufcnt: %u, total: %u\n", in atmel_sha_update_dma_start()
566 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
570 return atmel_sha_update_dma_slow(dd); in atmel_sha_update_dma_start()
599 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, in atmel_sha_update_dma_start()
601 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in atmel_sha_update_dma_start()
602 dev_err(dd->dev, "dma %u bytes error\n", in atmel_sha_update_dma_start()
611 return atmel_sha_xmit_start(dd, ctx->dma_addr, count, 0, in atmel_sha_update_dma_start()
615 if (!dma_map_sg(dd->dev, ctx->sg, 1, in atmel_sha_update_dma_start()
617 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
625 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), in atmel_sha_update_dma_start()
630 if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) { in atmel_sha_update_dma_start()
631 dev_err(dd->dev, "dma_map_sg error\n"); in atmel_sha_update_dma_start()
638 return atmel_sha_xmit_start(dd, sg_dma_address(ctx->sg), length, 0, in atmel_sha_update_dma_start()
642 static int atmel_sha_update_dma_stop(struct atmel_sha_dev *dd) in atmel_sha_update_dma_stop() argument
644 struct atmel_sha_reqctx *ctx = ahash_request_ctx(dd->req); in atmel_sha_update_dma_stop()
647 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in atmel_sha_update_dma_stop()
654 dma_unmap_single(dd->dev, ctx->dma_addr, in atmel_sha_update_dma_stop()
658 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen + in atmel_sha_update_dma_stop()
665 static int atmel_sha_update_req(struct atmel_sha_dev *dd) in atmel_sha_update_req() argument
667 struct ahash_request *req = dd->req; in atmel_sha_update_req()
671 dev_dbg(dd->dev, "update_req: total: %u, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
675 err = atmel_sha_update_cpu(dd); in atmel_sha_update_req()
677 err = atmel_sha_update_dma_start(dd); in atmel_sha_update_req()
680 dev_dbg(dd->dev, "update: err: %d, digcnt: 0x%llx 0x%llx\n", in atmel_sha_update_req()
686 static int atmel_sha_final_req(struct atmel_sha_dev *dd) in atmel_sha_final_req() argument
688 struct ahash_request *req = dd->req; in atmel_sha_final_req()
697 err = atmel_sha_xmit_dma_map(dd, ctx, count, 1); in atmel_sha_final_req()
704 err = atmel_sha_xmit_cpu(dd, ctx->buffer, count, 1); in atmel_sha_final_req()
707 dev_dbg(dd->dev, "final_req: err: %d\n", err); in atmel_sha_final_req()
720 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
723 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
726 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
729 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
732 hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i)); in atmel_sha_copy_hash()
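
The five near-identical loops in atmel_sha_copy_hash differ only in how many 32-bit words they pull out of SHA_REG_DIGEST(i); the word count follows each algorithm's digest size (SHA-1: 5, SHA-224: 7, SHA-256: 8, SHA-384: 12, SHA-512: 16). A consolidated sketch (the ctx->digest field name is an assumption):

static void sha_read_digest(struct atmel_sha_reqctx *ctx,
			    unsigned int words)
{
	u32 *hash = (u32 *)ctx->digest;
	unsigned int i;

	for (i = 0; i < words; i++)
		hash[i] = atmel_sha_read(ctx->dd, SHA_REG_DIGEST(i));
}
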
757 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish() local
763 dev_dbg(dd->dev, "digcnt: 0x%llx 0x%llx, bufcnt: %u\n", ctx->digcnt[1], in atmel_sha_finish()
772 struct atmel_sha_dev *dd = ctx->dd; in atmel_sha_finish_req() local
776 if (SHA_FLAGS_FINAL & dd->flags) in atmel_sha_finish_req()
783 dd->flags &= ~(SHA_FLAGS_BUSY | SHA_FLAGS_FINAL | SHA_FLAGS_CPU | in atmel_sha_finish_req()
786 clk_disable(dd->iclk); in atmel_sha_finish_req()
792 tasklet_schedule(&dd->done_task); in atmel_sha_finish_req()
795 static int atmel_sha_hw_init(struct atmel_sha_dev *dd) in atmel_sha_hw_init() argument
799 err = clk_enable(dd->iclk); in atmel_sha_hw_init()
803 if (!(SHA_FLAGS_INIT & dd->flags)) { in atmel_sha_hw_init()
804 atmel_sha_write(dd, SHA_CR, SHA_CR_SWRST); in atmel_sha_hw_init()
805 dd->flags |= SHA_FLAGS_INIT; in atmel_sha_hw_init()
806 dd->err = 0; in atmel_sha_hw_init()
812 static inline unsigned int atmel_sha_get_version(struct atmel_sha_dev *dd) in atmel_sha_get_version() argument
814 return atmel_sha_read(dd, SHA_HW_VERSION) & 0x00000fff; in atmel_sha_get_version()
817 static void atmel_sha_hw_version_init(struct atmel_sha_dev *dd) in atmel_sha_hw_version_init() argument
819 atmel_sha_hw_init(dd); in atmel_sha_hw_version_init()
821 dd->hw_version = atmel_sha_get_version(dd); in atmel_sha_hw_version_init()
823 dev_info(dd->dev, in atmel_sha_hw_version_init()
824 "version: 0x%x\n", dd->hw_version); in atmel_sha_hw_version_init()
826 clk_disable(dd->iclk); in atmel_sha_hw_version_init()
829 static int atmel_sha_handle_queue(struct atmel_sha_dev *dd, in atmel_sha_handle_queue() argument
837 spin_lock_irqsave(&dd->lock, flags); in atmel_sha_handle_queue()
839 ret = ahash_enqueue_request(&dd->queue, req); in atmel_sha_handle_queue()
841 if (SHA_FLAGS_BUSY & dd->flags) { in atmel_sha_handle_queue()
842 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
846 backlog = crypto_get_backlog(&dd->queue); in atmel_sha_handle_queue()
847 async_req = crypto_dequeue_request(&dd->queue); in atmel_sha_handle_queue()
849 dd->flags |= SHA_FLAGS_BUSY; in atmel_sha_handle_queue()
851 spin_unlock_irqrestore(&dd->lock, flags); in atmel_sha_handle_queue()
860 dd->req = req; in atmel_sha_handle_queue()
863 dev_dbg(dd->dev, "handling new req, op: %lu, nbytes: %d\n", in atmel_sha_handle_queue()
866 err = atmel_sha_hw_init(dd); in atmel_sha_handle_queue()
872 err = atmel_sha_update_req(dd); in atmel_sha_handle_queue()
875 err = atmel_sha_final_req(dd); in atmel_sha_handle_queue()
877 err = atmel_sha_final_req(dd); in atmel_sha_handle_queue()
885 dev_dbg(dd->dev, "exit, err: %d\n", err); in atmel_sha_handle_queue()
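
atmel_sha_handle_queue is the usual crypto_queue dispatcher: under the device lock it enqueues the incoming request, returns early if the engine is already busy, otherwise dequeues the next request (notifying any backlogged submitter) and marks the device busy before dropping the lock and driving the request through hw_init and update/final. A skeleton of that locking pattern, with the request processing elided:

static int sha_dispatch(struct atmel_sha_dev *dd, struct ahash_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	unsigned long flags;
	int ret = 0;

	spin_lock_irqsave(&dd->lock, flags);
	if (req)
		ret = ahash_enqueue_request(&dd->queue, req);

	if (SHA_FLAGS_BUSY & dd->flags) {
		/* a request is in flight; it will re-run the queue */
		spin_unlock_irqrestore(&dd->lock, flags);
		return ret;
	}

	backlog = crypto_get_backlog(&dd->queue);
	async_req = crypto_dequeue_request(&dd->queue);
	if (async_req)
		dd->flags |= SHA_FLAGS_BUSY;
	spin_unlock_irqrestore(&dd->lock, flags);

	if (!async_req)
		return ret;

	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	/* ... run ahash_request_cast(async_req) on the hardware ... */
	return ret;
}
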
894 struct atmel_sha_dev *dd = tctx->dd; in atmel_sha_enqueue() local
898 return atmel_sha_handle_queue(dd, req); in atmel_sha_enqueue()
927 struct atmel_sha_dev *dd = tctx->dd; in atmel_sha_final() local
939 err = atmel_sha_hw_init(dd); in atmel_sha_final()
943 dd->flags |= SHA_FLAGS_BUSY; in atmel_sha_final()
944 err = atmel_sha_final_req(dd); in atmel_sha_final()
1106 struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data; in atmel_sha_done_task() local
1109 if (!(SHA_FLAGS_BUSY & dd->flags)) { in atmel_sha_done_task()
1110 atmel_sha_handle_queue(dd, NULL); in atmel_sha_done_task()
1114 if (SHA_FLAGS_CPU & dd->flags) { in atmel_sha_done_task()
1115 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done_task()
1116 dd->flags &= ~SHA_FLAGS_OUTPUT_READY; in atmel_sha_done_task()
1119 } else if (SHA_FLAGS_DMA_READY & dd->flags) { in atmel_sha_done_task()
1120 if (SHA_FLAGS_DMA_ACTIVE & dd->flags) { in atmel_sha_done_task()
1121 dd->flags &= ~SHA_FLAGS_DMA_ACTIVE; in atmel_sha_done_task()
1122 atmel_sha_update_dma_stop(dd); in atmel_sha_done_task()
1123 if (dd->err) { in atmel_sha_done_task()
1124 err = dd->err; in atmel_sha_done_task()
1128 if (SHA_FLAGS_OUTPUT_READY & dd->flags) { in atmel_sha_done_task()
1130 dd->flags &= ~(SHA_FLAGS_DMA_READY | in atmel_sha_done_task()
1132 err = atmel_sha_update_dma_start(dd); in atmel_sha_done_task()
1141 atmel_sha_finish_req(dd->req, err); in atmel_sha_done_task()
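
The done tasklet is the bottom half of the interrupt handler. If the engine is idle it simply tries to dispatch the next queued request; for CPU transfers it consumes SHA_FLAGS_OUTPUT_READY and completes; for DMA it tears down the active transfer, propagates any latched dd->err, and either restarts DMA for the remaining data or finishes the request. A skeleton reconstructed from the branches above:

static void sha_done_task(unsigned long data)
{
	struct atmel_sha_dev *dd = (struct atmel_sha_dev *)data;
	int err = 0;

	if (!(SHA_FLAGS_BUSY & dd->flags)) {
		atmel_sha_handle_queue(dd, NULL);	/* idle: run queue */
		return;
	}

	if (SHA_FLAGS_CPU & dd->flags) {
		if (SHA_FLAGS_OUTPUT_READY & dd->flags) {
			dd->flags &= ~SHA_FLAGS_OUTPUT_READY;
			goto finish;
		}
	} else if (SHA_FLAGS_DMA_READY & dd->flags) {
		if (SHA_FLAGS_DMA_ACTIVE & dd->flags) {
			dd->flags &= ~SHA_FLAGS_DMA_ACTIVE;
			atmel_sha_update_dma_stop(dd);
			if (dd->err) {
				err = dd->err;
				goto finish;
			}
		}
		if (SHA_FLAGS_OUTPUT_READY & dd->flags) {
			dd->flags &= ~(SHA_FLAGS_DMA_READY |
				       SHA_FLAGS_OUTPUT_READY);
			err = atmel_sha_update_dma_start(dd);
			if (err != -EINPROGRESS)
				goto finish;
		}
	}
	return;

finish:
	atmel_sha_finish_req(dd->req, err);	/* complete the request */
}
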
1166 static void atmel_sha_unregister_algs(struct atmel_sha_dev *dd) in atmel_sha_unregister_algs() argument
1173 if (dd->caps.has_sha224) in atmel_sha_unregister_algs()
1176 if (dd->caps.has_sha_384_512) { in atmel_sha_unregister_algs()
1182 static int atmel_sha_register_algs(struct atmel_sha_dev *dd) in atmel_sha_register_algs() argument
1192 if (dd->caps.has_sha224) { in atmel_sha_register_algs()
1198 if (dd->caps.has_sha_384_512) { in atmel_sha_register_algs()
1233 static int atmel_sha_dma_init(struct atmel_sha_dev *dd, in atmel_sha_dma_init() argument
1243 dd->dma_lch_in.chan = dma_request_slave_channel_compat(mask_in, in atmel_sha_dma_init()
1244 atmel_sha_filter, &pdata->dma_slave->rxdata, dd->dev, "tx"); in atmel_sha_dma_init()
1245 if (!dd->dma_lch_in.chan) { in atmel_sha_dma_init()
1246 dev_warn(dd->dev, "no DMA channel available\n"); in atmel_sha_dma_init()
1250 dd->dma_lch_in.dma_conf.direction = DMA_MEM_TO_DEV; in atmel_sha_dma_init()
1251 dd->dma_lch_in.dma_conf.dst_addr = dd->phys_base + in atmel_sha_dma_init()
1253 dd->dma_lch_in.dma_conf.src_maxburst = 1; in atmel_sha_dma_init()
1254 dd->dma_lch_in.dma_conf.src_addr_width = in atmel_sha_dma_init()
1256 dd->dma_lch_in.dma_conf.dst_maxburst = 1; in atmel_sha_dma_init()
1257 dd->dma_lch_in.dma_conf.dst_addr_width = in atmel_sha_dma_init()
1259 dd->dma_lch_in.dma_conf.device_fc = false; in atmel_sha_dma_init()
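
DMA setup requests a slave channel and pre-fills the dma_slave_config: memory-to-device, destination fixed at the SHA data-input register inside the peripheral's physical window, single-beat bursts, and (per the device_fc line) no peripheral flow control. A sketch of the config, with the truncated dst_addr expression completed under the assumption that it targets SHA_REG_DIN(0) and that both access widths are 32-bit:

static void sha_dma_conf(struct atmel_sha_dev *dd)
{
	struct dma_slave_config *conf = &dd->dma_lch_in.dma_conf;

	conf->direction = DMA_MEM_TO_DEV;
	conf->dst_addr = dd->phys_base + SHA_REG_DIN(0);
	conf->src_maxburst = 1;
	conf->src_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	conf->dst_maxburst = 1;
	conf->dst_addr_width = DMA_SLAVE_BUSWIDTH_4_BYTES;
	conf->device_fc = false;	/* DMA controller is flow master */
}
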
1264 static void atmel_sha_dma_cleanup(struct atmel_sha_dev *dd) in atmel_sha_dma_cleanup() argument
1266 dma_release_channel(dd->dma_lch_in.chan); in atmel_sha_dma_cleanup()
1269 static void atmel_sha_get_cap(struct atmel_sha_dev *dd) in atmel_sha_get_cap() argument
1272 dd->caps.has_dma = 0; in atmel_sha_get_cap()
1273 dd->caps.has_dualbuff = 0; in atmel_sha_get_cap()
1274 dd->caps.has_sha224 = 0; in atmel_sha_get_cap()
1275 dd->caps.has_sha_384_512 = 0; in atmel_sha_get_cap()
1278 switch (dd->hw_version & 0xff0) { in atmel_sha_get_cap()
1280 dd->caps.has_dma = 1; in atmel_sha_get_cap()
1281 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
1282 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
1283 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
1286 dd->caps.has_dma = 1; in atmel_sha_get_cap()
1287 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
1288 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
1289 dd->caps.has_sha_384_512 = 1; in atmel_sha_get_cap()
1292 dd->caps.has_dma = 1; in atmel_sha_get_cap()
1293 dd->caps.has_dualbuff = 1; in atmel_sha_get_cap()
1294 dd->caps.has_sha224 = 1; in atmel_sha_get_cap()
1299 dev_warn(dd->dev, in atmel_sha_get_cap()
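
Capabilities default to off and are enabled per hardware revision: masking hw_version with 0xff0 drops the patch nibble so all steppings of one revision share a feature set, and unknown revisions fall through to the dev_warn. A sketch of the pattern with illustrative revision values, not the driver's actual ones:

static void sha_get_cap(struct atmel_sha_dev *dd)
{
	/* conservative defaults: assume the oldest IP */
	dd->caps.has_dma = 0;
	dd->caps.has_dualbuff = 0;
	dd->caps.has_sha224 = 0;
	dd->caps.has_sha_384_512 = 0;

	switch (dd->hw_version & 0xff0) {	/* ignore patch nibble */
	case 0x420:				/* illustrative revision */
		dd->caps.has_sha_384_512 = 1;
		/* fall through */
	case 0x410:
		dd->caps.has_dma = 1;
		dd->caps.has_dualbuff = 1;
		dd->caps.has_sha224 = 1;
		break;
	default:
		dev_warn(dd->dev, "unmanaged sha hardware version\n");
		break;
	}
}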