sdcp 171 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 175 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
sdcp 177 drivers/crypto/mxs-dcp.c dma_addr_t desc_phys = dma_map_single(sdcp->dev, desc, sizeof(*desc),
sdcp 180 drivers/crypto/mxs-dcp.c reinit_completion(&sdcp->completion[chan]);
sdcp 183 drivers/crypto/mxs-dcp.c writel(0xffffffff, sdcp->base + MXS_DCP_CH_N_STAT_CLR(chan));
sdcp 186 drivers/crypto/mxs-dcp.c writel(desc_phys, sdcp->base + MXS_DCP_CH_N_CMDPTR(chan));
sdcp 189 drivers/crypto/mxs-dcp.c writel(1, sdcp->base + MXS_DCP_CH_N_SEMA(chan));
sdcp 191 drivers/crypto/mxs-dcp.c ret = wait_for_completion_timeout(&sdcp->completion[chan],
sdcp 194 drivers/crypto/mxs-dcp.c dev_err(sdcp->dev, "Channel %i timeout (DCP_STAT=0x%08x)\n",
sdcp 195 drivers/crypto/mxs-dcp.c chan, readl(sdcp->base + MXS_DCP_STAT));
sdcp 199 drivers/crypto/mxs-dcp.c stat = readl(sdcp->base + MXS_DCP_CH_N_STAT(chan));
sdcp 201 drivers/crypto/mxs-dcp.c dev_err(sdcp->dev, "Channel %i error (CH_STAT=0x%08x)\n",
sdcp 206 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, desc_phys, sizeof(*desc), DMA_TO_DEVICE);
sdcp 217 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 218 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
sdcp 222 drivers/crypto/mxs-dcp.c dma_addr_t key_phys = dma_map_single(sdcp->dev, sdcp->coh->aes_key,
sdcp 225 drivers/crypto/mxs-dcp.c dma_addr_t src_phys = dma_map_single(sdcp->dev, sdcp->coh->aes_in_buf,
sdcp 227 drivers/crypto/mxs-dcp.c dma_addr_t dst_phys = dma_map_single(sdcp->dev, sdcp->coh->aes_out_buf,
sdcp 231 drivers/crypto/mxs-dcp.c dev_err(sdcp->dev, "Invalid block size!\n");
sdcp 266 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, key_phys, 2 * AES_KEYSIZE_128,
sdcp 268 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, src_phys, DCP_BUF_SZ, DMA_TO_DEVICE);
sdcp 269 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, dst_phys, DCP_BUF_SZ, DMA_FROM_DEVICE);
sdcp 276 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 287 drivers/crypto/mxs-dcp.c uint8_t *in_buf = sdcp->coh->aes_in_buf;
sdcp 288 drivers/crypto/mxs-dcp.c uint8_t *out_buf = sdcp->coh->aes_out_buf;
sdcp 294 drivers/crypto/mxs-dcp.c uint8_t *key = sdcp->coh->aes_key;
sdcp 391 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 402 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[chan]);
sdcp 403 drivers/crypto/mxs-dcp.c backlog = crypto_get_backlog(&sdcp->queue[chan]);
sdcp 404 drivers/crypto/mxs-dcp.c arq = crypto_dequeue_request(&sdcp->queue[chan]);
sdcp 405 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[chan]);
sdcp 450 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 463 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[actx->chan]);
sdcp 464 drivers/crypto/mxs-dcp.c ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base);
sdcp 465 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[actx->chan]);
sdcp 467 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]);
sdcp 556 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 562 drivers/crypto/mxs-dcp.c struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
sdcp 565 drivers/crypto/mxs-dcp.c dma_addr_t buf_phys = dma_map_single(sdcp->dev, sdcp->coh->sha_in_buf,
sdcp 591 drivers/crypto/mxs-dcp.c memcpy(sdcp->coh->sha_out_buf, sha_buf, halg->digestsize);
sdcp 598 drivers/crypto/mxs-dcp.c digest_phys = dma_map_single(sdcp->dev, sdcp->coh->sha_out_buf,
sdcp 607 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, digest_phys, DCP_SHA_PAY_SZ,
sdcp 611 drivers/crypto/mxs-dcp.c dma_unmap_single(sdcp->dev, buf_phys, DCP_BUF_SZ, DMA_TO_DEVICE);
sdcp 618 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 626 drivers/crypto/mxs-dcp.c uint8_t *in_buf = sdcp->coh->sha_in_buf;
sdcp 627 drivers/crypto/mxs-dcp.c uint8_t *out_buf = sdcp->coh->sha_out_buf;
sdcp 690 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 700 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[chan]);
sdcp 701 drivers/crypto/mxs-dcp.c backlog = crypto_get_backlog(&sdcp->queue[chan]);
sdcp 702 drivers/crypto/mxs-dcp.c arq = crypto_dequeue_request(&sdcp->queue[chan]);
sdcp 703 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[chan]);
sdcp 753 drivers/crypto/mxs-dcp.c struct dcp *sdcp = global_sdcp;
sdcp 777 drivers/crypto/mxs-dcp.c spin_lock(&sdcp->lock[actx->chan]);
sdcp 778 drivers/crypto/mxs-dcp.c ret = crypto_enqueue_request(&sdcp->queue[actx->chan], &req->base);
sdcp 779 drivers/crypto/mxs-dcp.c spin_unlock(&sdcp->lock[actx->chan]);
sdcp 781 drivers/crypto/mxs-dcp.c wake_up_process(sdcp->thread[actx->chan]);
sdcp 962 drivers/crypto/mxs-dcp.c struct dcp *sdcp = context;
sdcp 966 drivers/crypto/mxs-dcp.c stat = readl(sdcp->base + MXS_DCP_STAT);
sdcp 972 drivers/crypto/mxs-dcp.c writel(stat, sdcp->base + MXS_DCP_STAT_CLR);
sdcp 977 drivers/crypto/mxs-dcp.c complete(&sdcp->completion[i]);
sdcp 985 drivers/crypto/mxs-dcp.c struct dcp *sdcp = NULL;
sdcp 1002 drivers/crypto/mxs-dcp.c sdcp = devm_kzalloc(dev, sizeof(*sdcp), GFP_KERNEL);
sdcp 1003 drivers/crypto/mxs-dcp.c if (!sdcp)
sdcp 1006 drivers/crypto/mxs-dcp.c sdcp->dev = dev;
sdcp 1007 drivers/crypto/mxs-dcp.c sdcp->base = devm_platform_ioremap_resource(pdev, 0);
sdcp 1008 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->base))
sdcp 1009 drivers/crypto/mxs-dcp.c return PTR_ERR(sdcp->base);
sdcp 1013 drivers/crypto/mxs-dcp.c "dcp-vmi-irq", sdcp);
sdcp 1020 drivers/crypto/mxs-dcp.c "dcp-irq", sdcp);
sdcp 1027 drivers/crypto/mxs-dcp.c sdcp->coh = devm_kzalloc(dev, sizeof(*sdcp->coh) + DCP_ALIGNMENT,
sdcp 1029 drivers/crypto/mxs-dcp.c if (!sdcp->coh)
sdcp 1033 drivers/crypto/mxs-dcp.c sdcp->coh = PTR_ALIGN(sdcp->coh, DCP_ALIGNMENT);
sdcp 1036 drivers/crypto/mxs-dcp.c sdcp->dcp_clk = devm_clk_get(dev, "dcp");
sdcp 1037 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->dcp_clk)) {
sdcp 1038 drivers/crypto/mxs-dcp.c if (sdcp->dcp_clk != ERR_PTR(-ENOENT))
sdcp 1039 drivers/crypto/mxs-dcp.c return PTR_ERR(sdcp->dcp_clk);
sdcp 1040 drivers/crypto/mxs-dcp.c sdcp->dcp_clk = NULL;
sdcp 1042 drivers/crypto/mxs-dcp.c ret = clk_prepare_enable(sdcp->dcp_clk);
sdcp 1047 drivers/crypto/mxs-dcp.c ret = stmp_reset_block(sdcp->base);
sdcp 1056 drivers/crypto/mxs-dcp.c sdcp->base + MXS_DCP_CTRL);
sdcp 1060 drivers/crypto/mxs-dcp.c sdcp->base + MXS_DCP_CHANNELCTRL);
sdcp 1069 drivers/crypto/mxs-dcp.c writel(0xffff0000, sdcp->base + MXS_DCP_CONTEXT);
sdcp 1071 drivers/crypto/mxs-dcp.c writel(0xffffffff, sdcp->base + MXS_DCP_CH_N_STAT_CLR(i));
sdcp 1072 drivers/crypto/mxs-dcp.c writel(0xffffffff, sdcp->base + MXS_DCP_STAT_CLR);
sdcp 1074 drivers/crypto/mxs-dcp.c global_sdcp = sdcp;
sdcp 1076 drivers/crypto/mxs-dcp.c platform_set_drvdata(pdev, sdcp);
sdcp 1079 drivers/crypto/mxs-dcp.c spin_lock_init(&sdcp->lock[i]);
sdcp 1080 drivers/crypto/mxs-dcp.c init_completion(&sdcp->completion[i]);
sdcp 1081 drivers/crypto/mxs-dcp.c crypto_init_queue(&sdcp->queue[i], 50);
sdcp 1085 drivers/crypto/mxs-dcp.c sdcp->thread[DCP_CHAN_HASH_SHA] = kthread_run(dcp_chan_thread_sha,
sdcp 1087 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->thread[DCP_CHAN_HASH_SHA])) {
sdcp 1089 drivers/crypto/mxs-dcp.c ret = PTR_ERR(sdcp->thread[DCP_CHAN_HASH_SHA]);
sdcp 1093 drivers/crypto/mxs-dcp.c sdcp->thread[DCP_CHAN_CRYPTO] = kthread_run(dcp_chan_thread_aes,
sdcp 1095 drivers/crypto/mxs-dcp.c if (IS_ERR(sdcp->thread[DCP_CHAN_CRYPTO])) {
sdcp 1097 drivers/crypto/mxs-dcp.c ret = PTR_ERR(sdcp->thread[DCP_CHAN_CRYPTO]);
sdcp 1102 drivers/crypto/mxs-dcp.c sdcp->caps = readl(sdcp->base + MXS_DCP_CAPABILITY1);
sdcp 1104 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_AES128) {
sdcp 1114 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_SHA1) {
sdcp 1123 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_SHA256) {
sdcp 1135 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_SHA1)
sdcp 1139 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_AES128)
sdcp 1143 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_CRYPTO]);
sdcp 1146 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_HASH_SHA]);
sdcp 1149 drivers/crypto/mxs-dcp.c clk_disable_unprepare(sdcp->dcp_clk);
sdcp 1156 drivers/crypto/mxs-dcp.c struct dcp *sdcp = platform_get_drvdata(pdev);
sdcp 1158 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_SHA256)
sdcp 1161 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_SHA1)
sdcp 1164 drivers/crypto/mxs-dcp.c if (sdcp->caps & MXS_DCP_CAPABILITY1_AES128)
sdcp 1167 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_HASH_SHA]);
sdcp 1168 drivers/crypto/mxs-dcp.c kthread_stop(sdcp->thread[DCP_CHAN_CRYPTO]);
sdcp 1170 drivers/crypto/mxs-dcp.c clk_disable_unprepare(sdcp->dcp_clk);
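Read together, the entries for source lines 171-206 show the path every DCP operation funnels through: map the coherent DMA descriptor, clear the channel status, point CH_N_CMDPTR at the descriptor, bump the channel semaphore to start the transfer, then wait for the IRQ handler (lines 962-977) to signal the per-channel completion and check CH_N_STAT. The sketch below only reassembles those listed fragments; it assumes the driver's own struct dcp, struct dcp_dma_desc, global_sdcp and MXS_DCP_* register macros, and the timeout value, status mask, return codes and unmap placement are not visible in the listing and are assumptions.

    /* Minimal sketch, assuming the mxs-dcp driver's own types and register macros. */
    #include <linux/completion.h>
    #include <linux/dma-mapping.h>
    #include <linux/io.h>
    #include <linux/jiffies.h>

    static int dcp_run_channel_sketch(struct dcp *sdcp, struct dcp_async_ctx *actx)
    {
    	const int chan = actx->chan;
    	struct dcp_dma_desc *desc = &sdcp->coh->desc[actx->chan];
    	unsigned long ret;
    	uint32_t stat;

    	/* Hand the coherent descriptor to the device. */
    	dma_addr_t desc_phys = dma_map_single(sdcp->dev, desc, sizeof(*desc),
    					      DMA_TO_DEVICE);
    	if (dma_mapping_error(sdcp->dev, desc_phys))
    		return -ENOMEM;

    	reinit_completion(&sdcp->completion[chan]);

    	/* Clear stale channel status, load the descriptor, kick the semaphore. */
    	writel(0xffffffff, sdcp->base + MXS_DCP_CH_N_STAT_CLR(chan));
    	writel(desc_phys, sdcp->base + MXS_DCP_CH_N_CMDPTR(chan));
    	writel(1, sdcp->base + MXS_DCP_CH_N_SEMA(chan));

    	/* The IRQ handler clears MXS_DCP_STAT and completes this channel. */
    	ret = wait_for_completion_timeout(&sdcp->completion[chan],
    					  msecs_to_jiffies(1000)); /* timeout assumed */
    	dma_unmap_single(sdcp->dev, desc_phys, sizeof(*desc), DMA_TO_DEVICE);

    	if (!ret) {
    		dev_err(sdcp->dev, "Channel %i timeout (DCP_STAT=0x%08x)\n",
    			chan, readl(sdcp->base + MXS_DCP_STAT));
    		return -ETIMEDOUT;
    	}

    	stat = readl(sdcp->base + MXS_DCP_CH_N_STAT(chan));
    	if (stat & 0xff) { /* error mask assumed */
    		dev_err(sdcp->dev, "Channel %i error (CH_STAT=0x%08x)\n",
    			chan, stat);
    		return -EINVAL;
    	}

    	return 0;
    }

The AES and SHA paths (lines 217-269 and 556-611) build on the same helper: they additionally map the key, input and output buffers from sdcp->coh before kicking the channel, and the per-channel kthreads (lines 391-405, 690-703) feed it requests dequeued from sdcp->queue[chan] under sdcp->lock[chan].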