Lines Matching refs:crc

78 	struct bfin_crypto_crc	*crc;  member
95 struct bfin_crypto_crc *crc; member
115 static int bfin_crypto_crc_init_hw(struct bfin_crypto_crc *crc, u32 key) in bfin_crypto_crc_init_hw() argument
117 writel(0, &crc->regs->datacntrld); in bfin_crypto_crc_init_hw()
118 writel(MODE_CALC_CRC << OPMODE_OFFSET, &crc->regs->control); in bfin_crypto_crc_init_hw()
119 writel(key, &crc->regs->curresult); in bfin_crypto_crc_init_hw()
122 writel(CMPERRI | DCNTEXPI, &crc->regs->status); in bfin_crypto_crc_init_hw()
123 writel(CMPERRI | DCNTEXPI, &crc->regs->intrenset); in bfin_crypto_crc_init_hw()
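
Taken together, the init_hw fragments above amount to the following hardware bring-up sequence. This is a minimal sketch pieced together from the matching lines; the register-block layout and bit definitions live in the driver's header, and the return path is assumed:

static int bfin_crypto_crc_init_hw(struct bfin_crypto_crc *crc, u32 key)
{
	writel(0, &crc->regs->datacntrld);		/* no auto-reload count */
	writel(MODE_CALC_CRC << OPMODE_OFFSET, &crc->regs->control);
	writel(key, &crc->regs->curresult);		/* seed the running CRC */

	/* clear stale compare-error / count-expired status, then enable
	 * those two interrupt sources */
	writel(CMPERRI | DCNTEXPI, &crc->regs->status);
	writel(CMPERRI | DCNTEXPI, &crc->regs->intrenset);

	return 0;
}
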
133 struct bfin_crypto_crc *crc; in bfin_crypto_crc_init() local
135 dev_dbg(ctx->crc->dev, "crc_init\n"); in bfin_crypto_crc_init()
137 list_for_each_entry(crc, &crc_list.dev_list, list) { in bfin_crypto_crc_init()
138 crc_ctx->crc = crc; in bfin_crypto_crc_init()
144 dev_dbg(ctx->crc->dev, "init: requested sg list is too big > %d\n", in bfin_crypto_crc_init()
149 ctx->crc = crc; in bfin_crypto_crc_init()
159 dev_dbg(ctx->crc->dev, "init: digest size: %d\n", in bfin_crypto_crc_init()
162 return bfin_crypto_crc_init_hw(crc, crc_ctx->key); in bfin_crypto_crc_init()
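
A condensed sketch of how crc_init() hangs together, reconstructed from the matches above. The scatterlist-length check, the per-request state reset and the descriptor-limit constant are assumptions; only the lines shown above are taken from the source:

static int bfin_crypto_crc_init(struct ahash_request *req)
{
	struct crypto_ahash *tfm = crypto_ahash_reqtfm(req);
	struct bfin_crypto_crc_ctx *crc_ctx = crypto_ahash_ctx(tfm);
	struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(req);
	struct bfin_crypto_crc *crc;

	dev_dbg(ctx->crc->dev, "crc_init\n");

	/* bind this request to the first registered CRC device */
	list_for_each_entry(crc, &crc_list.dev_list, list) {
		crc_ctx->crc = crc;
		break;
	}

	/* reject source lists with more entries than the DMA descriptor
	 * ring can hold (limit constant assumed) */
	if (sg_nents(req->src) > CRC_MAX_DMA_DESC) {
		dev_dbg(ctx->crc->dev, "init: requested sg list is too big > %d\n",
			CRC_MAX_DMA_DESC);
		return -EINVAL;
	}

	ctx->crc = crc;
	/* ... reset of the per-request buffer/length bookkeeping elided ... */

	dev_dbg(ctx->crc->dev, "init: digest size: %d\n",
		crypto_ahash_digestsize(tfm));

	return bfin_crypto_crc_init_hw(crc, crc_ctx->key);
}
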
165 static void bfin_crypto_crc_config_dma(struct bfin_crypto_crc *crc) in bfin_crypto_crc_config_dma() argument
168 struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(crc->req); in bfin_crypto_crc_config_dma()
176 dma_map_sg(crc->dev, ctx->sg, ctx->sg_nents, DMA_TO_DEVICE); in bfin_crypto_crc_config_dma()
191 memcpy(crc->sg_mid_buf + (i << 2) + mid_dma_count, in bfin_crypto_crc_config_dma()
201 crc->sg_cpu[i].start_addr = crc->sg_mid_dma + (i << 2); in bfin_crypto_crc_config_dma()
202 crc->sg_cpu[i].cfg = dma_config; in bfin_crypto_crc_config_dma()
203 crc->sg_cpu[i].x_count = 1; in bfin_crypto_crc_config_dma()
204 crc->sg_cpu[i].x_modify = CHKSUM_DIGEST_SIZE; in bfin_crypto_crc_config_dma()
205 dev_dbg(crc->dev, "%d: crc_dma: start_addr:0x%lx, " in bfin_crypto_crc_config_dma()
207 i, crc->sg_cpu[i].start_addr, in bfin_crypto_crc_config_dma()
208 crc->sg_cpu[i].cfg, crc->sg_cpu[i].x_count, in bfin_crypto_crc_config_dma()
209 crc->sg_cpu[i].x_modify); in bfin_crypto_crc_config_dma()
231 crc->sg_cpu[i].start_addr = dma_addr; in bfin_crypto_crc_config_dma()
232 crc->sg_cpu[i].cfg = dma_config; in bfin_crypto_crc_config_dma()
233 crc->sg_cpu[i].x_count = dma_count; in bfin_crypto_crc_config_dma()
234 crc->sg_cpu[i].x_modify = dma_mod; in bfin_crypto_crc_config_dma()
235 dev_dbg(crc->dev, "%d: crc_dma: start_addr:0x%lx, " in bfin_crypto_crc_config_dma()
237 i, crc->sg_cpu[i].start_addr, in bfin_crypto_crc_config_dma()
238 crc->sg_cpu[i].cfg, crc->sg_cpu[i].x_count, in bfin_crypto_crc_config_dma()
239 crc->sg_cpu[i].x_modify); in bfin_crypto_crc_config_dma()
244 memcpy(crc->sg_mid_buf + (i << 2), in bfin_crypto_crc_config_dma()
254 crc->sg_cpu[i].start_addr = dma_map_single(crc->dev, ctx->bufnext, in bfin_crypto_crc_config_dma()
256 crc->sg_cpu[i].cfg = dma_config; in bfin_crypto_crc_config_dma()
257 crc->sg_cpu[i].x_count = 1; in bfin_crypto_crc_config_dma()
258 crc->sg_cpu[i].x_modify = CHKSUM_DIGEST_SIZE; in bfin_crypto_crc_config_dma()
259 dev_dbg(crc->dev, "%d: crc_dma: start_addr:0x%lx, " in bfin_crypto_crc_config_dma()
261 i, crc->sg_cpu[i].start_addr, in bfin_crypto_crc_config_dma()
262 crc->sg_cpu[i].cfg, crc->sg_cpu[i].x_count, in bfin_crypto_crc_config_dma()
263 crc->sg_cpu[i].x_modify); in bfin_crypto_crc_config_dma()
271 crc->sg_cpu[i - 1].cfg &= ~(DMAFLOW | NDSIZE); in bfin_crypto_crc_config_dma()
272 crc->sg_cpu[i - 1].cfg |= DI_EN; in bfin_crypto_crc_config_dma()
273 set_dma_curr_desc_addr(crc->dma_ch, (unsigned long *)crc->sg_dma); in bfin_crypto_crc_config_dma()
274 set_dma_x_count(crc->dma_ch, 0); in bfin_crypto_crc_config_dma()
275 set_dma_x_modify(crc->dma_ch, 0); in bfin_crypto_crc_config_dma()
276 set_dma_config(crc->dma_ch, dma_config); in bfin_crypto_crc_config_dma()
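
The config_dma matches describe one descriptor-array slot being filled per transfer, a bounce buffer (sg_mid_buf) patched in around unaligned scatterlist boundaries, and the channel being pointed at the array. A simplified sketch assuming every scatterlist entry is already 32-bit aligned; the bounce-buffer handling and the exact dma_config bits are elided:

static void bfin_crypto_crc_config_dma(struct bfin_crypto_crc *crc)
{
	struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(crc->req);
	struct scatterlist *sg;
	dma_addr_t dma_addr;
	unsigned short dma_config = 0;	/* descriptor-array flow + 32-bit word size; exact bits elided */
	unsigned int dma_count, dma_mod;
	int i = 0, j;

	dma_map_sg(crc->dev, ctx->sg, ctx->sg_nents, DMA_TO_DEVICE);

	for_each_sg(ctx->sg, sg, ctx->sg_nents, j) {
		/* simplified: the real code bounces unaligned head/tail
		 * bytes through crc->sg_mid_buf (see the memcpy lines) */
		dma_addr = sg_dma_address(sg);
		dma_count = sg_dma_len(sg) >> 2;	/* 32-bit words */
		dma_mod = 4;

		crc->sg_cpu[i].start_addr = dma_addr;
		crc->sg_cpu[i].cfg = dma_config;
		crc->sg_cpu[i].x_count = dma_count;
		crc->sg_cpu[i].x_modify = dma_mod;
		i++;
	}

	/* last descriptor: stop the descriptor chain and interrupt on done */
	crc->sg_cpu[i - 1].cfg &= ~(DMAFLOW | NDSIZE);
	crc->sg_cpu[i - 1].cfg |= DI_EN;

	/* hand the descriptor array to the channel and start it */
	set_dma_curr_desc_addr(crc->dma_ch, (unsigned long *)crc->sg_dma);
	set_dma_x_count(crc->dma_ch, 0);
	set_dma_x_modify(crc->dma_ch, 0);
	set_dma_config(crc->dma_ch, dma_config);
}
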
279 static int bfin_crypto_crc_handle_queue(struct bfin_crypto_crc *crc, in bfin_crypto_crc_handle_queue() argument
291 spin_lock_irqsave(&crc->lock, flags); in bfin_crypto_crc_handle_queue()
293 ret = ahash_enqueue_request(&crc->queue, req); in bfin_crypto_crc_handle_queue()
294 if (crc->busy) { in bfin_crypto_crc_handle_queue()
295 spin_unlock_irqrestore(&crc->lock, flags); in bfin_crypto_crc_handle_queue()
298 backlog = crypto_get_backlog(&crc->queue); in bfin_crypto_crc_handle_queue()
299 async_req = crypto_dequeue_request(&crc->queue); in bfin_crypto_crc_handle_queue()
301 crc->busy = 1; in bfin_crypto_crc_handle_queue()
302 spin_unlock_irqrestore(&crc->lock, flags); in bfin_crypto_crc_handle_queue()
311 crc->req = req; in bfin_crypto_crc_handle_queue()
317 dev_dbg(crc->dev, "handling new req, flag=%u, nbytes: %d\n", in bfin_crypto_crc_handle_queue()
322 crc->busy = 0; in bfin_crypto_crc_handle_queue()
339 crc->busy = 0; in bfin_crypto_crc_handle_queue()
389 writel(ctx->sg_buflen >> 2, &crc->regs->datacnt); in bfin_crypto_crc_handle_queue()
392 bfin_crypto_crc_config_dma(crc); in bfin_crypto_crc_handle_queue()
395 reg = readl(&crc->regs->control); in bfin_crypto_crc_handle_queue()
396 writel(reg | BLKEN, &crc->regs->control); in bfin_crypto_crc_handle_queue()
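
handle_queue() is the dispatcher that the entry points and the done tasklet both funnel into. A sketch of the flow, reconstructed from the matches; the backlog completion, the request cast and everything marked elided are assumptions taken from the usual crypto-queue pattern rather than from the listing:

static int bfin_crypto_crc_handle_queue(struct bfin_crypto_crc *crc,
					struct ahash_request *req)
{
	struct crypto_async_request *async_req, *backlog;
	struct bfin_crypto_crc_reqctx *ctx;
	unsigned long flags;
	u32 reg;
	int ret = 0;

	spin_lock_irqsave(&crc->lock, flags);
	if (req)
		ret = ahash_enqueue_request(&crc->queue, req);
	if (crc->busy) {
		/* a request is already in flight; the IRQ/tasklet path
		 * re-enters here once it completes */
		spin_unlock_irqrestore(&crc->lock, flags);
		return ret;
	}
	backlog = crypto_get_backlog(&crc->queue);
	async_req = crypto_dequeue_request(&crc->queue);
	if (async_req)
		crc->busy = 1;
	spin_unlock_irqrestore(&crc->lock, flags);

	if (!async_req)
		return ret;
	if (backlog)
		backlog->complete(backlog, -EINPROGRESS);

	req = ahash_request_cast(async_req);
	crc->req = req;
	ctx = ahash_request_ctx(req);

	dev_dbg(crc->dev, "handling new req, flag=%u, nbytes: %d\n",
		ctx->flag, req->nbytes);

	/* ... buffering of partial words and the early-exit paths that
	 * clear crc->busy without touching the hardware elided ... */

	/* program the word count, build the descriptor chain, then
	 * enable the CRC block; completion arrives as an interrupt */
	writel(ctx->sg_buflen >> 2, &crc->regs->datacnt);
	bfin_crypto_crc_config_dma(crc);
	reg = readl(&crc->regs->control);
	writel(reg | BLKEN, &crc->regs->control);

	return -EINPROGRESS;
}
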
408 dev_dbg(ctx->crc->dev, "crc_update\n"); in bfin_crypto_crc_update()
412 return bfin_crypto_crc_handle_queue(ctx->crc, req); in bfin_crypto_crc_update()
421 dev_dbg(ctx->crc->dev, "crc_final\n"); in bfin_crypto_crc_final()
425 return bfin_crypto_crc_handle_queue(ctx->crc, req); in bfin_crypto_crc_final()
434 dev_dbg(ctx->crc->dev, "crc_finishupdate\n"); in bfin_crypto_crc_finup()
439 return bfin_crypto_crc_handle_queue(ctx->crc, req); in bfin_crypto_crc_finup()
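
update, final and finup all follow the same pattern: stamp the request context with the kind of operation, then hand the request to handle_queue(). A sketch of the update path; the state-flag name and the empty-request early return are assumptions, not shown in the listing:

static int bfin_crypto_crc_update(struct ahash_request *req)
{
	struct bfin_crypto_crc_reqctx *ctx = ahash_request_ctx(req);

	dev_dbg(ctx->crc->dev, "crc_update\n");
	if (!req->nbytes)
		return 0;

	ctx->total += req->nbytes;
	ctx->flag = CRC_CRYPTO_STATE_UPDATE;	/* flag name assumed */

	return bfin_crypto_crc_handle_queue(ctx->crc, req);
}
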
458 dev_dbg(crc_ctx->crc->dev, "crc_setkey\n"); in bfin_crypto_crc_setkey()
509 struct bfin_crypto_crc *crc = (struct bfin_crypto_crc *)data; in bfin_crypto_crc_done_task() local
511 bfin_crypto_crc_handle_queue(crc, NULL); in bfin_crypto_crc_done_task()
516 struct bfin_crypto_crc *crc = dev_id; in bfin_crypto_crc_handler() local
519 if (readl(&crc->regs->status) & DCNTEXP) { in bfin_crypto_crc_handler()
520 writel(DCNTEXP, &crc->regs->status); in bfin_crypto_crc_handler()
523 put_unaligned_le32(readl(&crc->regs->result), in bfin_crypto_crc_handler()
524 crc->req->result); in bfin_crypto_crc_handler()
526 reg = readl(&crc->regs->control); in bfin_crypto_crc_handler()
527 writel(reg & ~BLKEN, &crc->regs->control); in bfin_crypto_crc_handler()
528 crc->busy = 0; in bfin_crypto_crc_handler()
530 if (crc->req->base.complete) in bfin_crypto_crc_handler()
531 crc->req->base.complete(&crc->req->base, 0); in bfin_crypto_crc_handler()
533 tasklet_schedule(&crc->done_task); in bfin_crypto_crc_handler()
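
The completion path reads directly off the handler matches: acknowledge DCNTEXP, copy the hardware result into the request, drop BLKEN, complete the request, and schedule the tasklet, which simply calls handle_queue() again to start whatever is queued next. A sketch; the local declaration and the return values are not in the listing:

static irqreturn_t bfin_crypto_crc_handler(int irq, void *dev_id)
{
	struct bfin_crypto_crc *crc = dev_id;
	u32 reg;

	if (readl(&crc->regs->status) & DCNTEXP) {
		writel(DCNTEXP, &crc->regs->status);	/* ack the interrupt */

		/* copy the accumulated CRC into the caller's result buffer */
		put_unaligned_le32(readl(&crc->regs->result),
				   crc->req->result);

		reg = readl(&crc->regs->control);
		writel(reg & ~BLKEN, &crc->regs->control);
		crc->busy = 0;

		if (crc->req->base.complete)
			crc->req->base.complete(&crc->req->base, 0);

		/* let the tasklet feed the next queued request */
		tasklet_schedule(&crc->done_task);

		return IRQ_HANDLED;
	}

	return IRQ_NONE;
}
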
548 struct bfin_crypto_crc *crc = platform_get_drvdata(pdev); in bfin_crypto_crc_suspend() local
551 while ((readl(&crc->regs->control) & BLKEN) && --i) in bfin_crypto_crc_suspend()
573 struct bfin_crypto_crc *crc; in bfin_crypto_crc_probe() local
577 crc = devm_kzalloc(dev, sizeof(*crc), GFP_KERNEL); in bfin_crypto_crc_probe()
578 if (!crc) { in bfin_crypto_crc_probe()
583 crc->dev = dev; in bfin_crypto_crc_probe()
585 INIT_LIST_HEAD(&crc->list); in bfin_crypto_crc_probe()
586 spin_lock_init(&crc->lock); in bfin_crypto_crc_probe()
587 tasklet_init(&crc->done_task, bfin_crypto_crc_done_task, (unsigned long)crc); in bfin_crypto_crc_probe()
588 crypto_init_queue(&crc->queue, CRC_CCRYPTO_QUEUE_LENGTH); in bfin_crypto_crc_probe()
596 crc->regs = devm_ioremap_resource(dev, res); in bfin_crypto_crc_probe()
597 if (IS_ERR((void *)crc->regs)) { in bfin_crypto_crc_probe()
599 return PTR_ERR((void *)crc->regs); in bfin_crypto_crc_probe()
602 crc->irq = platform_get_irq(pdev, 0); in bfin_crypto_crc_probe()
603 if (crc->irq < 0) { in bfin_crypto_crc_probe()
608 ret = devm_request_irq(dev, crc->irq, bfin_crypto_crc_handler, in bfin_crypto_crc_probe()
609 IRQF_SHARED, dev_name(dev), crc); in bfin_crypto_crc_probe()
620 crc->dma_ch = res->start; in bfin_crypto_crc_probe()
622 ret = request_dma(crc->dma_ch, dev_name(dev)); in bfin_crypto_crc_probe()
628 crc->sg_cpu = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &crc->sg_dma, GFP_KERNEL); in bfin_crypto_crc_probe()
629 if (crc->sg_cpu == NULL) { in bfin_crypto_crc_probe()
637 crc->sg_mid_buf = (u8 *)(crc->sg_cpu + ((CRC_MAX_DMA_DESC + 1) << 1)); in bfin_crypto_crc_probe()
638 crc->sg_mid_dma = crc->sg_dma + sizeof(struct dma_desc_array) in bfin_crypto_crc_probe()
641 writel(0, &crc->regs->control); in bfin_crypto_crc_probe()
642 crc->poly = (u32)pdev->dev.platform_data; in bfin_crypto_crc_probe()
643 writel(crc->poly, &crc->regs->poly); in bfin_crypto_crc_probe()
645 while (!(readl(&crc->regs->status) & LUTDONE) && (--timeout) > 0) in bfin_crypto_crc_probe()
651 platform_set_drvdata(pdev, crc); in bfin_crypto_crc_probe()
654 list_add(&crc->list, &crc_list.dev_list); in bfin_crypto_crc_probe()
671 if (crc->sg_cpu) in bfin_crypto_crc_probe()
672 dma_free_coherent(&pdev->dev, PAGE_SIZE, crc->sg_cpu, crc->sg_dma); in bfin_crypto_crc_probe()
673 free_dma(crc->dma_ch); in bfin_crypto_crc_probe()
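
A condensed sketch of the probe sequence assembled from the matches above. Resource lookups, error labels, return codes and the timeout value fall outside the matching lines and are assumptions; the ahash algorithm registration is elided:

static int bfin_crypto_crc_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct bfin_crypto_crc *crc;
	struct resource *res;
	unsigned int timeout = 100000;	/* LUT-generation poll limit; value assumed */
	int ret;

	crc = devm_kzalloc(dev, sizeof(*crc), GFP_KERNEL);
	if (!crc)
		return -ENOMEM;

	crc->dev = dev;
	INIT_LIST_HEAD(&crc->list);
	spin_lock_init(&crc->lock);
	tasklet_init(&crc->done_task, bfin_crypto_crc_done_task, (unsigned long)crc);
	crypto_init_queue(&crc->queue, CRC_CCRYPTO_QUEUE_LENGTH);

	/* MMIO registers, shared completion IRQ, and the DMA channel */
	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	crc->regs = devm_ioremap_resource(dev, res);
	if (IS_ERR((void *)crc->regs))
		return PTR_ERR((void *)crc->regs);

	crc->irq = platform_get_irq(pdev, 0);
	if (crc->irq < 0)
		return crc->irq;

	ret = devm_request_irq(dev, crc->irq, bfin_crypto_crc_handler,
			       IRQF_SHARED, dev_name(dev), crc);
	if (ret)
		return ret;

	res = platform_get_resource(pdev, IORESOURCE_DMA, 0);
	crc->dma_ch = res->start;
	ret = request_dma(crc->dma_ch, dev_name(dev));
	if (ret)
		return ret;

	/* one coherent page holds both the descriptor array and the bounce
	 * buffer used around unaligned scatterlist boundaries */
	crc->sg_cpu = dma_alloc_coherent(&pdev->dev, PAGE_SIZE, &crc->sg_dma, GFP_KERNEL);
	if (!crc->sg_cpu) {
		ret = -ENOMEM;
		goto out_error_dma;
	}
	crc->sg_mid_buf = (u8 *)(crc->sg_cpu + ((CRC_MAX_DMA_DESC + 1) << 1));
	crc->sg_mid_dma = crc->sg_dma + sizeof(struct dma_desc_array)
			  * ((CRC_MAX_DMA_DESC + 1) << 1);	/* offset assumed to mirror sg_mid_buf */

	/* program the polynomial from platform data and wait for the
	 * hardware to finish generating its lookup table */
	writel(0, &crc->regs->control);
	crc->poly = (u32)pdev->dev.platform_data;
	writel(crc->poly, &crc->regs->poly);
	while (!(readl(&crc->regs->status) & LUTDONE) && (--timeout) > 0)
		cpu_relax();

	platform_set_drvdata(pdev, crc);
	list_add(&crc->list, &crc_list.dev_list);

	/* ... ahash algorithm registration elided ... */
	return 0;

out_error_dma:
	if (crc->sg_cpu)
		dma_free_coherent(&pdev->dev, PAGE_SIZE, crc->sg_cpu, crc->sg_dma);
	free_dma(crc->dma_ch);
	return ret;
}
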
684 struct bfin_crypto_crc *crc = platform_get_drvdata(pdev); in bfin_crypto_crc_remove() local
686 if (!crc) in bfin_crypto_crc_remove()
690 list_del(&crc->list); in bfin_crypto_crc_remove()
694 tasklet_kill(&crc->done_task); in bfin_crypto_crc_remove()
695 free_dma(crc->dma_ch); in bfin_crypto_crc_remove()