Lines matching refs: dd
49 #define SHA_REG_IDIGEST(dd, x) ((dd)->pdata->idigest_ofs + ((x) * 0x04)) argument
50 #define SHA_REG_DIN(dd, x) ((dd)->pdata->din_ofs + ((x) * 0x04)) argument
51 #define SHA_REG_DIGCNT(dd) ((dd)->pdata->digcnt_ofs) argument
53 #define SHA_REG_ODIGEST(dd, x) ((dd)->pdata->odigest_ofs + ((x) * 0x04)) argument
63 #define SHA_REG_REV(dd) ((dd)->pdata->rev_ofs) argument
65 #define SHA_REG_MASK(dd) ((dd)->pdata->mask_ofs) argument
71 #define SHA_REG_SYSSTATUS(dd) ((dd)->pdata->sysstatus_ofs) argument
74 #define SHA_REG_MODE(dd) ((dd)->pdata->mode_ofs) argument
88 #define SHA_REG_LENGTH(dd) ((dd)->pdata->length_ofs) argument
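
The macros above compute register addresses from per-SoC offsets kept in dd->pdata, so the same driver code addresses both the OMAP2 and OMAP4 register maps. A minimal stand-alone sketch of that pattern follows; the struct names and offset values are made up for illustration and are not taken from the driver.

#include <stdio.h>
#include <stdint.h>

/* Hypothetical per-SoC description: only the offsets differ between variants. */
struct my_pdata {
	uint32_t idigest_ofs;
	uint32_t din_ofs;
};

struct my_dev {
	const struct my_pdata *pdata;
};

/* Same shape as the driver macros: a pdata offset plus a word index. */
#define MY_REG_IDIGEST(dd, x)	((dd)->pdata->idigest_ofs + ((x) * 0x04))
#define MY_REG_DIN(dd, x)	((dd)->pdata->din_ofs + ((x) * 0x04))

static const struct my_pdata variant_a = { 0x00, 0x1c };
static const struct my_pdata variant_b = { 0x20, 0x80 };

int main(void)
{
	struct my_dev a = { .pdata = &variant_a };
	struct my_dev b = { .pdata = &variant_b };

	/* The same macro yields different addresses depending on pdata. */
	printf("IDIGEST[2]: %#x vs %#x\n",
	       (unsigned)MY_REG_IDIGEST(&a, 2), (unsigned)MY_REG_IDIGEST(&b, 2));
	printf("DIN[0]:     %#x vs %#x\n",
	       (unsigned)MY_REG_DIN(&a, 0), (unsigned)MY_REG_DIN(&b, 0));
	return 0;
}
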
141 struct omap_sham_dev *dd; member
167 struct omap_sham_dev *dd; member
192 void (*write_ctrl)(struct omap_sham_dev *dd, size_t length,
194 void (*trigger)(struct omap_sham_dev *dd, size_t length);
195 int (*poll_irq)(struct omap_sham_dev *dd);
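
The write_ctrl/trigger/poll_irq entries above are per-variant callbacks stored in pdata, letting common code dispatch to the OMAP2 or OMAP4 implementation without knowing which one it has. A hedged, stand-alone sketch of that ops-table dispatch (all names below are invented):

#include <stdio.h>
#include <stddef.h>

struct fake_dev;

/* Hypothetical ops table mirroring the driver's per-variant callbacks. */
struct fake_pdata {
	void (*write_ctrl)(struct fake_dev *dd, size_t length, int final, int dma);
	void (*trigger)(struct fake_dev *dd, size_t length);
	int (*poll_irq)(struct fake_dev *dd);
};

struct fake_dev {
	const struct fake_pdata *pdata;
};

static void write_ctrl_v1(struct fake_dev *dd, size_t len, int final, int dma)
{ (void)dd; printf("v1 ctrl: len=%zu final=%d dma=%d\n", len, final, dma); }
static void trigger_v1(struct fake_dev *dd, size_t len)
{ (void)dd; printf("v1 trigger: len=%zu\n", len); }
static int poll_irq_v1(struct fake_dev *dd) { (void)dd; return 0; }

static const struct fake_pdata pdata_v1 = {
	.write_ctrl = write_ctrl_v1,
	.trigger    = trigger_v1,
	.poll_irq   = poll_irq_v1,
};

/* Common code only ever goes through dd->pdata->..., as the driver does. */
static int xmit(struct fake_dev *dd, size_t len, int final)
{
	dd->pdata->write_ctrl(dd, len, final, 0);
	dd->pdata->trigger(dd, len);
	return dd->pdata->poll_irq(dd);
}

int main(void)
{
	struct fake_dev dev = { .pdata = &pdata_v1 };

	return xmit(&dev, 64, 1);
}
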
245 static inline u32 omap_sham_read(struct omap_sham_dev *dd, u32 offset) in omap_sham_read() argument
247 return __raw_readl(dd->io_base + offset); in omap_sham_read()
250 static inline void omap_sham_write(struct omap_sham_dev *dd, in omap_sham_write() argument
253 __raw_writel(value, dd->io_base + offset); in omap_sham_write()
256 static inline void omap_sham_write_mask(struct omap_sham_dev *dd, u32 address, in omap_sham_write_mask() argument
261 val = omap_sham_read(dd, address); in omap_sham_write_mask()
264 omap_sham_write(dd, address, val); in omap_sham_write_mask()
267 static inline int omap_sham_wait(struct omap_sham_dev *dd, u32 offset, u32 bit) in omap_sham_wait() argument
271 while (!(omap_sham_read(dd, offset) & bit)) { in omap_sham_wait()
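
omap_sham_write_mask() is a read-modify-write helper and omap_sham_wait() spins until a status bit comes up. A small user-space model of both, operating on an in-memory "register" array instead of MMIO (values and the timeout are illustrative):

#include <stdio.h>
#include <stdint.h>

static uint32_t fake_regs[4];			/* stands in for dd->io_base */

static uint32_t reg_read(unsigned off)		{ return fake_regs[off]; }
static void reg_write(unsigned off, uint32_t v)	{ fake_regs[off] = v; }

/* Read-modify-write: clear the bits in 'mask', then OR in 'value'. */
static void reg_write_mask(unsigned off, uint32_t value, uint32_t mask)
{
	uint32_t val = reg_read(off);

	val &= ~mask;
	val |= value;
	reg_write(off, val);
}

/* Bounded busy-wait for a bit, like the driver's timeout loop. */
static int reg_wait(unsigned off, uint32_t bit)
{
	unsigned long timeout = 1000;

	while (!(reg_read(off) & bit)) {
		if (!--timeout)
			return -1;		/* -ETIMEDOUT in the driver */
	}
	return 0;
}

int main(void)
{
	reg_write(0, 0xffffffff);
	reg_write_mask(0, 0x5, 0xf);		/* low nibble becomes 0x5 */
	printf("reg0 = %#x\n", (unsigned)reg_read(0));

	reg_write(1, 1u << 3);
	printf("wait = %d\n", reg_wait(1, 1u << 3));
	return 0;
}
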
282 struct omap_sham_dev *dd = ctx->dd; in omap_sham_copy_hash_omap2() local
286 for (i = 0; i < dd->pdata->digest_size / sizeof(u32); i++) { in omap_sham_copy_hash_omap2()
288 hash[i] = omap_sham_read(dd, SHA_REG_IDIGEST(dd, i)); in omap_sham_copy_hash_omap2()
290 omap_sham_write(dd, SHA_REG_IDIGEST(dd, i), hash[i]); in omap_sham_copy_hash_omap2()
297 struct omap_sham_dev *dd = ctx->dd; in omap_sham_copy_hash_omap4() local
301 struct crypto_ahash *tfm = crypto_ahash_reqtfm(dd->req); in omap_sham_copy_hash_omap4()
306 for (i = 0; i < dd->pdata->digest_size / sizeof(u32); i++) { in omap_sham_copy_hash_omap4()
308 opad[i] = omap_sham_read(dd, in omap_sham_copy_hash_omap4()
309 SHA_REG_ODIGEST(dd, i)); in omap_sham_copy_hash_omap4()
311 omap_sham_write(dd, SHA_REG_ODIGEST(dd, i), in omap_sham_copy_hash_omap4()
335 if (test_bit(FLAGS_BE32_SHA1, &ctx->dd->flags)) in omap_sham_copy_ready_hash()
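
Both copy_hash helpers read the digest back one 32-bit word at a time, and omap_sham_copy_ready_hash() byte-swaps the words when FLAGS_BE32_SHA1 is set. A stand-alone model of the word-copy-with-optional-swap idea; the input words and flag handling are illustrative only.

#include <stdio.h>
#include <stdint.h>

#define DIGEST_WORDS 5				/* SHA-1: 20 bytes */

static uint32_t swab32(uint32_t v)
{
	return (v >> 24) | ((v >> 8) & 0x0000ff00) |
	       ((v << 8) & 0x00ff0000) | (v << 24);
}

/* Copy the digest word by word, swapping when the hardware is big-endian. */
static void copy_ready_hash(uint32_t *out, const uint32_t *in, int big_endian)
{
	int i;

	for (i = 0; i < DIGEST_WORDS; i++)
		out[i] = big_endian ? swab32(in[i]) : in[i];
}

int main(void)
{
	const uint32_t regs[DIGEST_WORDS] = {
		0x01020304, 0x05060708, 0x090a0b0c, 0x0d0e0f10, 0x11121314
	};
	uint32_t digest[DIGEST_WORDS];
	const uint8_t *p = (const uint8_t *)digest;
	size_t i;

	copy_ready_hash(digest, regs, 1);
	for (i = 0; i < sizeof(digest); i++)
		printf("%02x", p[i]);
	printf("\n");
	return 0;
}
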
363 static int omap_sham_hw_init(struct omap_sham_dev *dd) in omap_sham_hw_init() argument
365 pm_runtime_get_sync(dd->dev); in omap_sham_hw_init()
367 if (!test_bit(FLAGS_INIT, &dd->flags)) { in omap_sham_hw_init()
368 set_bit(FLAGS_INIT, &dd->flags); in omap_sham_hw_init()
369 dd->err = 0; in omap_sham_hw_init()
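
omap_sham_hw_init() takes a runtime-PM reference and performs one-time setup only if FLAGS_INIT is not yet set. A minimal model of that guard; a plain bitmask stands in for the driver's atomic set_bit/test_bit flag word, so unlike the driver this sketch is not concurrency-safe.

#include <stdio.h>

#define FLAGS_INIT 0

struct fake_dev {
	unsigned long flags;
	int err;
};

static int hw_init(struct fake_dev *dd)
{
	/* pm_runtime_get_sync(dd->dev) would go here in the real driver. */
	if (!(dd->flags & (1UL << FLAGS_INIT))) {
		dd->flags |= 1UL << FLAGS_INIT;
		dd->err = 0;
		printf("first call: performing one-time setup\n");
	} else {
		printf("already initialised, skipping setup\n");
	}
	return 0;
}

int main(void)
{
	struct fake_dev dev = { 0, 0 };

	hw_init(&dev);
	hw_init(&dev);
	return 0;
}
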
375 static void omap_sham_write_ctrl_omap2(struct omap_sham_dev *dd, size_t length, in omap_sham_write_ctrl_omap2() argument
378 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_write_ctrl_omap2()
382 omap_sham_write(dd, SHA_REG_DIGCNT(dd), ctx->digcnt); in omap_sham_write_ctrl_omap2()
384 omap_sham_write_mask(dd, SHA_REG_MASK(dd), in omap_sham_write_ctrl_omap2()
401 omap_sham_write_mask(dd, SHA_REG_CTRL, val, mask); in omap_sham_write_ctrl_omap2()
404 static void omap_sham_trigger_omap2(struct omap_sham_dev *dd, size_t length) in omap_sham_trigger_omap2() argument
408 static int omap_sham_poll_irq_omap2(struct omap_sham_dev *dd) in omap_sham_poll_irq_omap2() argument
410 return omap_sham_wait(dd, SHA_REG_CTRL, SHA_REG_CTRL_INPUT_READY); in omap_sham_poll_irq_omap2()
437 static void omap_sham_write_n(struct omap_sham_dev *dd, u32 offset, in omap_sham_write_n() argument
441 omap_sham_write(dd, offset, *value); in omap_sham_write_n()
444 static void omap_sham_write_ctrl_omap4(struct omap_sham_dev *dd, size_t length, in omap_sham_write_ctrl_omap4() argument
447 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_write_ctrl_omap4()
457 struct crypto_ahash *tfm = crypto_ahash_reqtfm(dd->req); in omap_sham_write_ctrl_omap4()
468 omap_sham_write_n(dd, SHA_REG_ODIGEST(dd, 0), in omap_sham_write_ctrl_omap4()
470 omap_sham_write_n(dd, SHA_REG_IDIGEST(dd, 0), in omap_sham_write_ctrl_omap4()
487 dev_dbg(dd->dev, "ctrl: %08x, flags: %08lx\n", val, ctx->flags); in omap_sham_write_ctrl_omap4()
488 omap_sham_write_mask(dd, SHA_REG_MODE(dd), val, mask); in omap_sham_write_ctrl_omap4()
489 omap_sham_write(dd, SHA_REG_IRQENA, SHA_REG_IRQENA_OUTPUT_RDY); in omap_sham_write_ctrl_omap4()
490 omap_sham_write_mask(dd, SHA_REG_MASK(dd), in omap_sham_write_ctrl_omap4()
496 static void omap_sham_trigger_omap4(struct omap_sham_dev *dd, size_t length) in omap_sham_trigger_omap4() argument
498 omap_sham_write(dd, SHA_REG_LENGTH(dd), length); in omap_sham_trigger_omap4()
501 static int omap_sham_poll_irq_omap4(struct omap_sham_dev *dd) in omap_sham_poll_irq_omap4() argument
503 return omap_sham_wait(dd, SHA_REG_IRQSTATUS, in omap_sham_poll_irq_omap4()
507 static int omap_sham_xmit_cpu(struct omap_sham_dev *dd, const u8 *buf, in omap_sham_xmit_cpu() argument
510 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_xmit_cpu()
514 dev_dbg(dd->dev, "xmit_cpu: digcnt: %d, length: %d, final: %d\n", in omap_sham_xmit_cpu()
517 dd->pdata->write_ctrl(dd, length, final, 0); in omap_sham_xmit_cpu()
518 dd->pdata->trigger(dd, length); in omap_sham_xmit_cpu()
524 set_bit(FLAGS_FINAL, &dd->flags); /* catch last interrupt */ in omap_sham_xmit_cpu()
526 set_bit(FLAGS_CPU, &dd->flags); in omap_sham_xmit_cpu()
532 if (dd->pdata->poll_irq(dd)) in omap_sham_xmit_cpu()
536 omap_sham_write(dd, SHA_REG_DIN(dd, count), in omap_sham_xmit_cpu()
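
The CPU (PIO) transmit path programs the control registers, then pushes data into the DIN registers one 32-bit word at a time after polling for input-ready. A simplified user-space model of that word-pumping loop; the "FIFO" is just a buffer and the ready poll always succeeds here.

#include <stdio.h>
#include <stdint.h>
#include <string.h>

#define BLOCK_WORDS 16				/* one 64-byte SHA block */

static uint32_t fifo[BLOCK_WORDS];
static int widx;

static int poll_input_ready(void) { return 0; }	/* always ready in the model */

static void din_write(uint32_t word)
{
	fifo[widx++ % BLOCK_WORDS] = word;
}

/* Round the byte length up to whole 32-bit words and pump them out. */
static int xmit_cpu(const uint8_t *buf, size_t length)
{
	size_t count = (length + 3) / 4;	/* DIV_ROUND_UP(length, 4) */
	uint32_t word;
	size_t i;

	if (poll_input_ready())
		return -1;

	for (i = 0; i < count; i++) {
		word = 0;
		memcpy(&word, buf + i * 4,
		       (i * 4 + 4 <= length) ? 4 : length - i * 4);
		din_write(word);
	}
	return 0;
}

int main(void)
{
	const char msg[] = "abc";

	xmit_cpu((const uint8_t *)msg, sizeof(msg) - 1);
	printf("first word pushed: %#x\n", (unsigned)fifo[0]);
	return 0;
}
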
546 struct omap_sham_dev *dd = param; in omap_sham_dma_callback() local
548 set_bit(FLAGS_DMA_READY, &dd->flags); in omap_sham_dma_callback()
549 tasklet_schedule(&dd->done_task); in omap_sham_dma_callback()
552 static int omap_sham_xmit_dma(struct omap_sham_dev *dd, dma_addr_t dma_addr, in omap_sham_xmit_dma() argument
555 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_xmit_dma()
560 dev_dbg(dd->dev, "xmit_dma: digcnt: %d, length: %d, final: %d\n", in omap_sham_xmit_dma()
565 cfg.dst_addr = dd->phys_base + SHA_REG_DIN(dd, 0); in omap_sham_xmit_dma()
569 ret = dmaengine_slave_config(dd->dma_lch, &cfg); in omap_sham_xmit_dma()
590 tx = dmaengine_prep_slave_sg(dd->dma_lch, &ctx->sgl, 1, in omap_sham_xmit_dma()
593 tx = dmaengine_prep_slave_single(dd->dma_lch, dma_addr, len32, in omap_sham_xmit_dma()
598 dev_err(dd->dev, "prep_slave_sg/single() failed\n"); in omap_sham_xmit_dma()
603 tx->callback_param = dd; in omap_sham_xmit_dma()
605 dd->pdata->write_ctrl(dd, length, final, 1); in omap_sham_xmit_dma()
610 set_bit(FLAGS_FINAL, &dd->flags); /* catch last interrupt */ in omap_sham_xmit_dma()
612 set_bit(FLAGS_DMA_ACTIVE, &dd->flags); in omap_sham_xmit_dma()
615 dma_async_issue_pending(dd->dma_lch); in omap_sham_xmit_dma()
617 dd->pdata->trigger(dd, length); in omap_sham_xmit_dma()
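
omap_sham_xmit_dma() follows the usual dmaengine flow: configure the slave channel, prepare a descriptor, attach a completion callback, issue the transfer, and then trigger the hardware. A stand-alone model of that submit-then-callback shape; a plain struct and function pointer replace the dmaengine descriptor, and every name below is invented.

#include <stdio.h>
#include <stddef.h>

#define FLAGS_DMA_READY  0
#define FLAGS_DMA_ACTIVE 1

struct fake_dev {
	unsigned long flags;
};

/* A "descriptor": what to transfer plus a completion callback. */
struct fake_desc {
	const void *src;
	size_t len;
	void (*callback)(void *param);
	void *callback_param;
};

static void dma_callback(void *param)
{
	struct fake_dev *dd = param;

	/* The real callback sets FLAGS_DMA_READY and schedules the tasklet. */
	dd->flags |= 1UL << FLAGS_DMA_READY;
	printf("completion: flags=%#lx\n", dd->flags);
}

static void issue_pending(struct fake_desc *desc)
{
	/* Pretend the transfer completes immediately. */
	desc->callback(desc->callback_param);
}

static int xmit_dma(struct fake_dev *dd, const void *src, size_t len)
{
	struct fake_desc desc = {
		.src = src, .len = len,
		.callback = dma_callback, .callback_param = dd,
	};

	dd->flags |= 1UL << FLAGS_DMA_ACTIVE;	/* mark the transfer in flight */
	issue_pending(&desc);
	return 0;
}

int main(void)
{
	struct fake_dev dev = { 0 };
	char buf[64] = { 0 };

	return xmit_dma(&dev, buf, sizeof(buf));
}
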
667 static int omap_sham_xmit_dma_map(struct omap_sham_dev *dd, in omap_sham_xmit_dma_map() argument
673 ctx->dma_addr = dma_map_single(dd->dev, ctx->buffer, ctx->buflen, in omap_sham_xmit_dma_map()
675 if (dma_mapping_error(dd->dev, ctx->dma_addr)) { in omap_sham_xmit_dma_map()
676 dev_err(dd->dev, "dma %u bytes error\n", ctx->buflen); in omap_sham_xmit_dma_map()
682 ret = omap_sham_xmit_dma(dd, ctx->dma_addr, length, final, 0); in omap_sham_xmit_dma_map()
684 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen, in omap_sham_xmit_dma_map()
690 static int omap_sham_update_dma_slow(struct omap_sham_dev *dd) in omap_sham_update_dma_slow() argument
692 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_update_dma_slow()
700 dev_dbg(dd->dev, "slow: bufcnt: %u, digcnt: %d, final: %d\n", in omap_sham_update_dma_slow()
706 return omap_sham_xmit_dma_map(dd, ctx, count, final); in omap_sham_update_dma_slow()
717 static int omap_sham_update_dma_start(struct omap_sham_dev *dd) in omap_sham_update_dma_start() argument
719 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_update_dma_start()
728 return omap_sham_update_dma_slow(dd); in omap_sham_update_dma_start()
737 return omap_sham_update_dma_slow(dd); in omap_sham_update_dma_start()
739 dev_dbg(dd->dev, "fast: digcnt: %d, bufcnt: %u, total: %u\n", in omap_sham_update_dma_start()
746 return omap_sham_update_dma_slow(dd); in omap_sham_update_dma_start()
750 return omap_sham_update_dma_slow(dd); in omap_sham_update_dma_start()
765 if (!dma_map_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE)) { in omap_sham_update_dma_start()
766 dev_err(dd->dev, "dma_map_sg error\n"); in omap_sham_update_dma_start()
777 ret = omap_sham_xmit_dma(dd, sg_dma_address(ctx->sg), length, final, 1); in omap_sham_update_dma_start()
779 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in omap_sham_update_dma_start()
784 static int omap_sham_update_cpu(struct omap_sham_dev *dd) in omap_sham_update_cpu() argument
786 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_update_cpu()
796 dev_dbg(dd->dev, "cpu: bufcnt: %u, digcnt: %d, final: %d\n", in omap_sham_update_cpu()
802 return omap_sham_xmit_cpu(dd, ctx->buffer, bufcnt, final); in omap_sham_update_cpu()
808 static int omap_sham_update_dma_stop(struct omap_sham_dev *dd) in omap_sham_update_dma_stop() argument
810 struct omap_sham_reqctx *ctx = ahash_request_ctx(dd->req); in omap_sham_update_dma_stop()
812 dmaengine_terminate_all(dd->dma_lch); in omap_sham_update_dma_stop()
815 dma_unmap_sg(dd->dev, ctx->sg, 1, DMA_TO_DEVICE); in omap_sham_update_dma_stop()
822 dma_unmap_single(dd->dev, ctx->dma_addr, ctx->buflen, in omap_sham_update_dma_stop()
834 struct omap_sham_dev *dd = NULL, *tmp; in omap_sham_init() local
838 if (!tctx->dd) { in omap_sham_init()
840 dd = tmp; in omap_sham_init()
843 tctx->dd = dd; in omap_sham_init()
845 dd = tctx->dd; in omap_sham_init()
849 ctx->dd = dd; in omap_sham_init()
853 dev_dbg(dd->dev, "init: digest size: %d\n", in omap_sham_init()
888 if (!test_bit(FLAGS_AUTO_XOR, &dd->flags)) { in omap_sham_init()
902 static int omap_sham_update_req(struct omap_sham_dev *dd) in omap_sham_update_req() argument
904 struct ahash_request *req = dd->req; in omap_sham_update_req()
908 dev_dbg(dd->dev, "update_req: total: %u, digcnt: %d, finup: %d\n", in omap_sham_update_req()
912 err = omap_sham_update_cpu(dd); in omap_sham_update_req()
914 err = omap_sham_update_dma_start(dd); in omap_sham_update_req()
917 dev_dbg(dd->dev, "update: err: %d, digcnt: %d\n", err, ctx->digcnt); in omap_sham_update_req()
922 static int omap_sham_final_req(struct omap_sham_dev *dd) in omap_sham_final_req() argument
924 struct ahash_request *req = dd->req; in omap_sham_final_req()
928 if ((ctx->bufcnt <= get_block_size(ctx)) || dd->polling_mode) in omap_sham_final_req()
936 err = omap_sham_xmit_dma_map(dd, ctx, ctx->bufcnt, 1); in omap_sham_final_req()
938 err = omap_sham_xmit_cpu(dd, ctx->buffer, ctx->bufcnt, 1); in omap_sham_final_req()
942 dev_dbg(dd->dev, "final_req: err: %d\n", err); in omap_sham_final_req()
966 struct omap_sham_dev *dd = ctx->dd; in omap_sham_finish() local
972 !test_bit(FLAGS_AUTO_XOR, &dd->flags)) in omap_sham_finish()
976 dev_dbg(dd->dev, "digcnt: %d, bufcnt: %d\n", ctx->digcnt, ctx->bufcnt); in omap_sham_finish()
984 struct omap_sham_dev *dd = ctx->dd; in omap_sham_finish_req() local
987 dd->pdata->copy_hash(req, 1); in omap_sham_finish_req()
988 if (test_bit(FLAGS_FINAL, &dd->flags)) in omap_sham_finish_req()
995 dd->flags &= ~(BIT(FLAGS_BUSY) | BIT(FLAGS_FINAL) | BIT(FLAGS_CPU) | in omap_sham_finish_req()
998 pm_runtime_put(dd->dev); in omap_sham_finish_req()
1004 tasklet_schedule(&dd->done_task); in omap_sham_finish_req()
1007 static int omap_sham_handle_queue(struct omap_sham_dev *dd, in omap_sham_handle_queue() argument
1015 spin_lock_irqsave(&dd->lock, flags); in omap_sham_handle_queue()
1017 ret = ahash_enqueue_request(&dd->queue, req); in omap_sham_handle_queue()
1018 if (test_bit(FLAGS_BUSY, &dd->flags)) { in omap_sham_handle_queue()
1019 spin_unlock_irqrestore(&dd->lock, flags); in omap_sham_handle_queue()
1022 backlog = crypto_get_backlog(&dd->queue); in omap_sham_handle_queue()
1023 async_req = crypto_dequeue_request(&dd->queue); in omap_sham_handle_queue()
1025 set_bit(FLAGS_BUSY, &dd->flags); in omap_sham_handle_queue()
1026 spin_unlock_irqrestore(&dd->lock, flags); in omap_sham_handle_queue()
1035 dd->req = req; in omap_sham_handle_queue()
1038 dev_dbg(dd->dev, "handling new req, op: %lu, nbytes: %d\n", in omap_sham_handle_queue()
1041 err = omap_sham_hw_init(dd); in omap_sham_handle_queue()
1047 dd->pdata->copy_hash(req, 0); in omap_sham_handle_queue()
1050 err = omap_sham_update_req(dd); in omap_sham_handle_queue()
1053 err = omap_sham_final_req(dd); in omap_sham_handle_queue()
1055 err = omap_sham_final_req(dd); in omap_sham_handle_queue()
1062 dev_dbg(dd->dev, "exit, err: %d\n", err); in omap_sham_handle_queue()
1071 struct omap_sham_dev *dd = tctx->dd; in omap_sham_enqueue() local
1075 return omap_sham_handle_queue(dd, req); in omap_sham_enqueue()
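
omap_sham_handle_queue() enqueues the request under dd->lock, bails out if the device is already busy, and otherwise dequeues the next request, sets FLAGS_BUSY and runs it; omap_sham_enqueue() is a thin wrapper around it, and the done tasklet re-kicks the queue with a NULL request. A single-threaded model of that busy-flag gate (no locking, purely illustrative):

#include <stdio.h>

#define QUEUE_LEN 4

struct fake_req { int id; };

struct fake_dev {
	struct fake_req *queue[QUEUE_LEN];
	int head, tail, busy;
};

static int handle_queue(struct fake_dev *dd, struct fake_req *req)
{
	struct fake_req *next;

	/* Enqueue first; the driver does this under dd->lock. */
	if (req)
		dd->queue[dd->tail++ % QUEUE_LEN] = req;

	/* If a request is already running, the new one just waits. */
	if (dd->busy)
		return 0;

	if (dd->head == dd->tail)
		return 0;			/* nothing queued */

	next = dd->queue[dd->head++ % QUEUE_LEN];
	dd->busy = 1;

	printf("processing request %d\n", next->id);
	/* ... hw_init, copy_hash, update/final would run here ... */
	dd->busy = 0;				/* normally cleared in finish_req */
	return 0;
}

int main(void)
{
	struct fake_dev dev = { { 0 }, 0, 0, 0 };
	struct fake_req a = { 1 }, b = { 2 };

	handle_queue(&dev, &a);
	handle_queue(&dev, &b);
	handle_queue(&dev, NULL);		/* the done tasklet re-kicks the queue */
	return 0;
}
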
1081 struct omap_sham_dev *dd = ctx->dd; in omap_sham_update() local
1101 dd->polling_mode) { in omap_sham_update()
1113 if (dd->polling_mode) in omap_sham_update()
1190 struct omap_sham_dev *dd = NULL, *tmp; in omap_sham_setkey() local
1194 if (!tctx->dd) { in omap_sham_setkey()
1196 dd = tmp; in omap_sham_setkey()
1199 tctx->dd = dd; in omap_sham_setkey()
1201 dd = tctx->dd; in omap_sham_setkey()
1222 if (!test_bit(FLAGS_AUTO_XOR, &dd->flags)) { in omap_sham_setkey()
1608 struct omap_sham_dev *dd = (struct omap_sham_dev *)data; in omap_sham_done_task() local
1611 if (!test_bit(FLAGS_BUSY, &dd->flags)) { in omap_sham_done_task()
1612 omap_sham_handle_queue(dd, NULL); in omap_sham_done_task()
1616 if (test_bit(FLAGS_CPU, &dd->flags)) { in omap_sham_done_task()
1617 if (test_and_clear_bit(FLAGS_OUTPUT_READY, &dd->flags)) { in omap_sham_done_task()
1619 err = omap_sham_update_cpu(dd); in omap_sham_done_task()
1623 } else if (test_bit(FLAGS_DMA_READY, &dd->flags)) { in omap_sham_done_task()
1624 if (test_and_clear_bit(FLAGS_DMA_ACTIVE, &dd->flags)) { in omap_sham_done_task()
1625 omap_sham_update_dma_stop(dd); in omap_sham_done_task()
1626 if (dd->err) { in omap_sham_done_task()
1627 err = dd->err; in omap_sham_done_task()
1631 if (test_and_clear_bit(FLAGS_OUTPUT_READY, &dd->flags)) { in omap_sham_done_task()
1633 clear_bit(FLAGS_DMA_READY, &dd->flags); in omap_sham_done_task()
1634 err = omap_sham_update_dma_start(dd); in omap_sham_done_task()
1643 dev_dbg(dd->dev, "update done: err: %d\n", err); in omap_sham_done_task()
1645 omap_sham_finish_req(dd->req, err); in omap_sham_done_task()
1648 static irqreturn_t omap_sham_irq_common(struct omap_sham_dev *dd) in omap_sham_irq_common() argument
1650 if (!test_bit(FLAGS_BUSY, &dd->flags)) { in omap_sham_irq_common()
1651 dev_warn(dd->dev, "Interrupt when no active requests.\n"); in omap_sham_irq_common()
1653 set_bit(FLAGS_OUTPUT_READY, &dd->flags); in omap_sham_irq_common()
1654 tasklet_schedule(&dd->done_task); in omap_sham_irq_common()
1662 struct omap_sham_dev *dd = dev_id; in omap_sham_irq_omap2() local
1664 if (unlikely(test_bit(FLAGS_FINAL, &dd->flags))) in omap_sham_irq_omap2()
1666 omap_sham_write_mask(dd, SHA_REG_CTRL, 0, SHA_REG_CTRL_LENGTH); in omap_sham_irq_omap2()
1668 omap_sham_write_mask(dd, SHA_REG_CTRL, SHA_REG_CTRL_OUTPUT_READY, in omap_sham_irq_omap2()
1670 omap_sham_read(dd, SHA_REG_CTRL); in omap_sham_irq_omap2()
1672 return omap_sham_irq_common(dd); in omap_sham_irq_omap2()
1677 struct omap_sham_dev *dd = dev_id; in omap_sham_irq_omap4() local
1679 omap_sham_write_mask(dd, SHA_REG_MASK(dd), 0, SHA_REG_MASK_IT_EN); in omap_sham_irq_omap4()
1681 return omap_sham_irq_common(dd); in omap_sham_irq_omap4()
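
Both interrupt handlers do the minimum in hard-IRQ context: acknowledge or mask the interrupt, set FLAGS_OUTPUT_READY, and defer the rest to the done_task tasklet. A compact model of that top-half/bottom-half split; the "tasklet" is just a function called later from main().

#include <stdio.h>

#define FLAGS_BUSY         0
#define FLAGS_OUTPUT_READY 1

struct fake_dev {
	unsigned long flags;
	int tasklet_pending;
};

/* Top half: record the event and ask for deferred processing. */
static void irq_common(struct fake_dev *dd)
{
	if (!(dd->flags & (1UL << FLAGS_BUSY))) {
		printf("interrupt with no active request\n");
		return;
	}
	dd->flags |= 1UL << FLAGS_OUTPUT_READY;
	dd->tasklet_pending = 1;		/* tasklet_schedule() in the driver */
}

/* Bottom half: runs later, outside hard-IRQ context. */
static void done_task(struct fake_dev *dd)
{
	if (dd->flags & (1UL << FLAGS_OUTPUT_READY)) {
		dd->flags &= ~(1UL << FLAGS_OUTPUT_READY);
		printf("bottom half: digest ready, finishing request\n");
	}
}

int main(void)
{
	struct fake_dev dev = { 1UL << FLAGS_BUSY, 0 };

	irq_common(&dev);
	if (dev.tasklet_pending)
		done_task(&dev);
	return 0;
}
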
1807 static int omap_sham_get_res_of(struct omap_sham_dev *dd, in omap_sham_get_res_of() argument
1828 dd->irq = irq_of_parse_and_map(node, 0); in omap_sham_get_res_of()
1829 if (!dd->irq) { in omap_sham_get_res_of()
1835 dd->dma = -1; /* Dummy value that's unused */ in omap_sham_get_res_of()
1836 dd->pdata = match->data; in omap_sham_get_res_of()
1846 static int omap_sham_get_res_of(struct omap_sham_dev *dd, in omap_sham_get_res_of() argument
1853 static int omap_sham_get_res_pdev(struct omap_sham_dev *dd, in omap_sham_get_res_pdev() argument
1870 dd->irq = platform_get_irq(pdev, 0); in omap_sham_get_res_pdev()
1871 if (dd->irq < 0) { in omap_sham_get_res_pdev()
1873 err = dd->irq; in omap_sham_get_res_pdev()
1884 dd->dma = r->start; in omap_sham_get_res_pdev()
1887 dd->pdata = &omap_sham_pdata_omap2; in omap_sham_get_res_pdev()
1895 struct omap_sham_dev *dd; in omap_sham_probe() local
1902 dd = devm_kzalloc(dev, sizeof(struct omap_sham_dev), GFP_KERNEL); in omap_sham_probe()
1903 if (dd == NULL) { in omap_sham_probe()
1908 dd->dev = dev; in omap_sham_probe()
1909 platform_set_drvdata(pdev, dd); in omap_sham_probe()
1911 INIT_LIST_HEAD(&dd->list); in omap_sham_probe()
1912 spin_lock_init(&dd->lock); in omap_sham_probe()
1913 tasklet_init(&dd->done_task, omap_sham_done_task, (unsigned long)dd); in omap_sham_probe()
1914 crypto_init_queue(&dd->queue, OMAP_SHAM_QUEUE_LENGTH); in omap_sham_probe()
1916 err = (dev->of_node) ? omap_sham_get_res_of(dd, dev, &res) : in omap_sham_probe()
1917 omap_sham_get_res_pdev(dd, pdev, &res); in omap_sham_probe()
1921 dd->io_base = devm_ioremap_resource(dev, &res); in omap_sham_probe()
1922 if (IS_ERR(dd->io_base)) { in omap_sham_probe()
1923 err = PTR_ERR(dd->io_base); in omap_sham_probe()
1926 dd->phys_base = res.start; in omap_sham_probe()
1928 err = devm_request_irq(dev, dd->irq, dd->pdata->intr_hdlr, in omap_sham_probe()
1929 IRQF_TRIGGER_NONE, dev_name(dev), dd); in omap_sham_probe()
1932 dd->irq, err); in omap_sham_probe()
1939 dd->dma_lch = dma_request_slave_channel_compat(mask, omap_dma_filter_fn, in omap_sham_probe()
1940 &dd->dma, dev, "rx"); in omap_sham_probe()
1941 if (!dd->dma_lch) { in omap_sham_probe()
1942 dd->polling_mode = 1; in omap_sham_probe()
1946 dd->flags |= dd->pdata->flags; in omap_sham_probe()
1951 rev = omap_sham_read(dd, SHA_REG_REV(dd)); in omap_sham_probe()
1955 (rev & dd->pdata->major_mask) >> dd->pdata->major_shift, in omap_sham_probe()
1956 (rev & dd->pdata->minor_mask) >> dd->pdata->minor_shift); in omap_sham_probe()
1959 list_add_tail(&dd->list, &sham.dev_list); in omap_sham_probe()
1962 for (i = 0; i < dd->pdata->algs_info_size; i++) { in omap_sham_probe()
1963 for (j = 0; j < dd->pdata->algs_info[i].size; j++) { in omap_sham_probe()
1965 &dd->pdata->algs_info[i].algs_list[j]); in omap_sham_probe()
1969 dd->pdata->algs_info[i].registered++; in omap_sham_probe()
1976 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_sham_probe()
1977 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) in omap_sham_probe()
1979 &dd->pdata->algs_info[i].algs_list[j]); in omap_sham_probe()
1981 if (dd->dma_lch) in omap_sham_probe()
1982 dma_release_channel(dd->dma_lch); in omap_sham_probe()
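
The probe path registers every algorithm listed in pdata and, on failure, unregisters in reverse order only those that made it in before releasing the DMA channel; remove() walks the same lists. A stand-alone model of that register/unwind bookkeeping; the algorithm names and the forced failure are illustrative, only the "registered" counter mirrors the driver's shape.

#include <stdio.h>
#include <string.h>

struct algs_info {
	const char **algs_list;
	int size;
	int registered;
};

/* Pretend registration fails for one specific algorithm. */
static int register_alg(const char *name)
{
	if (!strcmp(name, "sha256"))
		return -1;
	printf("registered %s\n", name);
	return 0;
}

static void unregister_alg(const char *name)
{
	printf("unregistered %s\n", name);
}

int main(void)
{
	const char *md5_sha1[] = { "sha1", "md5" };
	const char *sha2[] = { "sha224", "sha256" };
	struct algs_info info[] = {
		{ md5_sha1, 2, 0 },
		{ sha2, 2, 0 },
	};
	int n = 2, i, j, err = 0;

	for (i = 0; i < n; i++) {
		for (j = 0; j < info[i].size; j++) {
			err = register_alg(info[i].algs_list[j]);
			if (err)
				goto err_algs;
			info[i].registered++;
		}
	}
	return 0;

err_algs:
	/* Unwind in reverse order, only what was actually registered. */
	for (i = n - 1; i >= 0; i--)
		for (j = info[i].registered - 1; j >= 0; j--)
			unregister_alg(info[i].algs_list[j]);
	return 1;
}
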
1991 struct omap_sham_dev *dd; in omap_sham_remove() local
1994 dd = platform_get_drvdata(pdev); in omap_sham_remove()
1995 if (!dd) in omap_sham_remove()
1998 list_del(&dd->list); in omap_sham_remove()
2000 for (i = dd->pdata->algs_info_size - 1; i >= 0; i--) in omap_sham_remove()
2001 for (j = dd->pdata->algs_info[i].registered - 1; j >= 0; j--) in omap_sham_remove()
2003 &dd->pdata->algs_info[i].algs_list[j]); in omap_sham_remove()
2004 tasklet_kill(&dd->done_task); in omap_sham_remove()
2007 if (dd->dma_lch) in omap_sham_remove()
2008 dma_release_channel(dd->dma_lch); in omap_sham_remove()