Lines matching refs:dev — identifier cross-reference over the SAHARA crypto driver. Each entry gives the source line number, the matching code line, the enclosing function, and whether 'dev' is an argument or a local there. Continuation lines that do not themselves contain 'dev' are omitted by the search, so some multi-line statements appear truncated.
242 static inline void sahara_write(struct sahara_dev *dev, u32 data, u32 reg) in sahara_write() argument
244 writel(data, dev->regs_base + reg); in sahara_write()
247 static inline unsigned int sahara_read(struct sahara_dev *dev, u32 reg) in sahara_read() argument
249 return readl(dev->regs_base + reg); in sahara_read()
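
The two accessors above are the driver's only MMIO touchpoints: writel()/readl() against regs_base plus a byte offset. A minimal userspace model of the same wrapper pattern, with an invented fake_dev standing in for the real ioremap()ed register window, might look like:

#include <stdint.h>

/* Hypothetical stand-in for struct sahara_dev; regs_base points at a
 * plain array here rather than a device mapping. */
struct fake_dev {
    volatile uint32_t *regs_base;
};

/* Mirrors sahara_write()/sahara_read(): 'reg' is a byte offset, so the
 * array index is scaled by the register width. */
static inline void fake_write(struct fake_dev *dev, uint32_t data, uint32_t reg)
{
    dev->regs_base[reg / sizeof(uint32_t)] = data;
}

static inline uint32_t fake_read(struct fake_dev *dev, uint32_t reg)
{
    return dev->regs_base[reg / sizeof(uint32_t)];
}
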
252 static u32 sahara_aes_key_hdr(struct sahara_dev *dev) in sahara_aes_key_hdr() argument
258 if (dev->flags & FLAGS_CBC) { in sahara_aes_key_hdr()
263 if (dev->flags & FLAGS_ENCRYPT) { in sahara_aes_key_hdr()
271 static u32 sahara_aes_data_link_hdr(struct sahara_dev *dev) in sahara_aes_data_link_hdr() argument
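
sahara_aes_key_hdr() and sahara_aes_data_link_hdr() fold the request's mode flags into the 32-bit command header of a hardware descriptor. A sketch of that composition with made-up bit values (the real encodings are the SAHARA_HDR_* defines in the driver, not these):

#include <stdint.h>

/* Invented bit assignments, for shape only. */
#define HDR_SKHA_BASE 0x00000001u
#define HDR_CBC       0x00000100u
#define HDR_ENCRYPT   0x00000200u

#define FLAGS_CBC     (1u << 0)
#define FLAGS_ENCRYPT (1u << 1)

static uint32_t fake_aes_key_hdr(uint32_t flags)
{
    uint32_t hdr = HDR_SKHA_BASE;

    if (flags & FLAGS_CBC)
        hdr |= HDR_CBC;     /* the key descriptor then also carries an IV */
    if (flags & FLAGS_ENCRYPT)
        hdr |= HDR_ENCRYPT; /* otherwise the engine decrypts */
    return hdr;
}
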
355 static void sahara_decode_error(struct sahara_dev *dev, unsigned int error) in sahara_decode_error() argument
360 dev_err(dev->device, "%s: Error Register = 0x%08x\n", __func__, error); in sahara_decode_error()
362 dev_err(dev->device, " - %s.\n", sahara_err_src[source]); in sahara_decode_error()
366 dev_err(dev->device, " * DMA read.\n"); in sahara_decode_error()
368 dev_err(dev->device, " * DMA write.\n"); in sahara_decode_error()
370 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
372 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
375 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
377 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
380 dev_err(dev->device, "\n"); in sahara_decode_error()
385 static void sahara_decode_status(struct sahara_dev *dev, unsigned int status) in sahara_decode_status() argument
394 dev_dbg(dev->device, "%s: Status Register = 0x%08x\n", in sahara_decode_status()
397 dev_dbg(dev->device, " - State = %d:\n", state); in sahara_decode_status()
399 dev_dbg(dev->device, " * Descriptor completed. IRQ pending.\n"); in sahara_decode_status()
401 dev_dbg(dev->device, " * %s.\n", in sahara_decode_status()
405 dev_dbg(dev->device, " - DAR Full.\n"); in sahara_decode_status()
407 dev_dbg(dev->device, " - Error.\n"); in sahara_decode_status()
409 dev_dbg(dev->device, " - Secure.\n"); in sahara_decode_status()
411 dev_dbg(dev->device, " - Fail.\n"); in sahara_decode_status()
413 dev_dbg(dev->device, " - RNG Reseed Request.\n"); in sahara_decode_status()
415 dev_dbg(dev->device, " - RNG Active.\n"); in sahara_decode_status()
417 dev_dbg(dev->device, " - MDHA Active.\n"); in sahara_decode_status()
419 dev_dbg(dev->device, " - SKHA Active.\n"); in sahara_decode_status()
422 dev_dbg(dev->device, " - Batch Mode.\n"); in sahara_decode_status()
424 dev_dbg(dev->device, " - Dedicated Mode.\n"); in sahara_decode_status()
426 dev_dbg(dev->device, " - Debug Mode.\n"); in sahara_decode_status()
428 dev_dbg(dev->device, " - Internal state = 0x%02x\n", in sahara_decode_status()
431 dev_dbg(dev->device, "Current DAR: 0x%08x\n", in sahara_decode_status()
432 sahara_read(dev, SAHARA_REG_CDAR)); in sahara_decode_status()
433 dev_dbg(dev->device, "Initial DAR: 0x%08x\n\n", in sahara_decode_status()
434 sahara_read(dev, SAHARA_REG_IDAR)); in sahara_decode_status()
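
sahara_decode_status() is pure field extraction over a single register read. A toy decoder over a hypothetical layout (a 3-bit state in the low bits, then one-bit flags) shows the shape without claiming the driver's real masks, which live in its SAHARA_STATUS_* macros:

#include <stdint.h>
#include <stdio.h>

static void fake_decode_status(uint32_t status)
{
    unsigned int state = status & 0x7;  /* hypothetical field layout */

    printf("State = %u\n", state);
    if (status & (1u << 3))
        printf(" - DAR Full\n");
    if (status & (1u << 4))
        printf(" - Error\n");
}
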
437 static void sahara_dump_descriptors(struct sahara_dev *dev) in sahara_dump_descriptors() argument
445 dev_dbg(dev->device, "Descriptor (%d) (0x%08x):\n", in sahara_dump_descriptors()
446 i, dev->hw_phys_desc[i]); in sahara_dump_descriptors()
447 dev_dbg(dev->device, "\thdr = 0x%08x\n", dev->hw_desc[i]->hdr); in sahara_dump_descriptors()
448 dev_dbg(dev->device, "\tlen1 = %u\n", dev->hw_desc[i]->len1); in sahara_dump_descriptors()
449 dev_dbg(dev->device, "\tp1 = 0x%08x\n", dev->hw_desc[i]->p1); in sahara_dump_descriptors()
450 dev_dbg(dev->device, "\tlen2 = %u\n", dev->hw_desc[i]->len2); in sahara_dump_descriptors()
451 dev_dbg(dev->device, "\tp2 = 0x%08x\n", dev->hw_desc[i]->p2); in sahara_dump_descriptors()
452 dev_dbg(dev->device, "\tnext = 0x%08x\n", in sahara_dump_descriptors()
453 dev->hw_desc[i]->next); in sahara_dump_descriptors()
455 dev_dbg(dev->device, "\n"); in sahara_dump_descriptors()
458 static void sahara_dump_links(struct sahara_dev *dev) in sahara_dump_links() argument
466 dev_dbg(dev->device, "Link (%d) (0x%08x):\n", in sahara_dump_links()
467 i, dev->hw_phys_link[i]); in sahara_dump_links()
468 dev_dbg(dev->device, "\tlen = %u\n", dev->hw_link[i]->len); in sahara_dump_links()
469 dev_dbg(dev->device, "\tp = 0x%08x\n", dev->hw_link[i]->p); in sahara_dump_links()
470 dev_dbg(dev->device, "\tnext = 0x%08x\n", in sahara_dump_links()
471 dev->hw_link[i]->next); in sahara_dump_links()
473 dev_dbg(dev->device, "\n"); in sahara_dump_links()
476 static int sahara_hw_descriptor_create(struct sahara_dev *dev) in sahara_hw_descriptor_create() argument
478 struct sahara_ctx *ctx = dev->ctx; in sahara_hw_descriptor_create()
486 memcpy(dev->key_base, ctx->key, ctx->keylen); in sahara_hw_descriptor_create()
489 if (dev->flags & FLAGS_CBC) { in sahara_hw_descriptor_create()
490 dev->hw_desc[idx]->len1 = AES_BLOCK_SIZE; in sahara_hw_descriptor_create()
491 dev->hw_desc[idx]->p1 = dev->iv_phys_base; in sahara_hw_descriptor_create()
493 dev->hw_desc[idx]->len1 = 0; in sahara_hw_descriptor_create()
494 dev->hw_desc[idx]->p1 = 0; in sahara_hw_descriptor_create()
496 dev->hw_desc[idx]->len2 = ctx->keylen; in sahara_hw_descriptor_create()
497 dev->hw_desc[idx]->p2 = dev->key_phys_base; in sahara_hw_descriptor_create()
498 dev->hw_desc[idx]->next = dev->hw_phys_desc[1]; in sahara_hw_descriptor_create()
500 dev->hw_desc[idx]->hdr = sahara_aes_key_hdr(dev); in sahara_hw_descriptor_create()
505 dev->nb_in_sg = sahara_sg_length(dev->in_sg, dev->total); in sahara_hw_descriptor_create()
506 dev->nb_out_sg = sahara_sg_length(dev->out_sg, dev->total); in sahara_hw_descriptor_create()
507 if ((dev->nb_in_sg + dev->nb_out_sg) > SAHARA_MAX_HW_LINK) { in sahara_hw_descriptor_create()
508 dev_err(dev->device, "not enough hw links (%d)\n", in sahara_hw_descriptor_create()
509 dev->nb_in_sg + dev->nb_out_sg); in sahara_hw_descriptor_create()
513 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
515 if (ret != dev->nb_in_sg) { in sahara_hw_descriptor_create()
516 dev_err(dev->device, "couldn't map in sg\n"); in sahara_hw_descriptor_create()
519 ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_hw_descriptor_create()
521 if (ret != dev->nb_out_sg) { in sahara_hw_descriptor_create()
522 dev_err(dev->device, "couldn't map out sg\n"); in sahara_hw_descriptor_create()
527 dev->hw_desc[idx]->p1 = dev->hw_phys_link[0]; in sahara_hw_descriptor_create()
528 sg = dev->in_sg; in sahara_hw_descriptor_create()
529 for (i = 0; i < dev->nb_in_sg; i++) { in sahara_hw_descriptor_create()
530 dev->hw_link[i]->len = sg->length; in sahara_hw_descriptor_create()
531 dev->hw_link[i]->p = sg->dma_address; in sahara_hw_descriptor_create()
532 if (i == (dev->nb_in_sg - 1)) { in sahara_hw_descriptor_create()
533 dev->hw_link[i]->next = 0; in sahara_hw_descriptor_create()
535 dev->hw_link[i]->next = dev->hw_phys_link[i + 1]; in sahara_hw_descriptor_create()
541 dev->hw_desc[idx]->p2 = dev->hw_phys_link[i]; in sahara_hw_descriptor_create()
542 sg = dev->out_sg; in sahara_hw_descriptor_create()
543 for (j = i; j < dev->nb_out_sg + i; j++) { in sahara_hw_descriptor_create()
544 dev->hw_link[j]->len = sg->length; in sahara_hw_descriptor_create()
545 dev->hw_link[j]->p = sg->dma_address; in sahara_hw_descriptor_create()
546 if (j == (dev->nb_out_sg + i - 1)) { in sahara_hw_descriptor_create()
547 dev->hw_link[j]->next = 0; in sahara_hw_descriptor_create()
549 dev->hw_link[j]->next = dev->hw_phys_link[j + 1]; in sahara_hw_descriptor_create()
555 dev->hw_desc[idx]->hdr = sahara_aes_data_link_hdr(dev); in sahara_hw_descriptor_create()
556 dev->hw_desc[idx]->len1 = dev->total; in sahara_hw_descriptor_create()
557 dev->hw_desc[idx]->len2 = dev->total; in sahara_hw_descriptor_create()
558 dev->hw_desc[idx]->next = 0; in sahara_hw_descriptor_create()
560 sahara_dump_descriptors(dev); in sahara_hw_descriptor_create()
561 sahara_dump_links(dev); in sahara_hw_descriptor_create()
563 sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR); in sahara_hw_descriptor_create()
568 dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_hw_descriptor_create()
571 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
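
This function is the heart of the AES path: descriptor 0 loads the key (plus the IV for CBC), and descriptor 1 points at two link chains, one walking the DMA-mapped input scatterlist and one the output. The two for-loops build those chains identically, so a single self-contained model covers both; fake_sg and the faked "physical" addresses below are inventions for illustration:

#include <stdint.h>

struct fake_sg { uint32_t length, dma_address; struct fake_sg *next; };
struct hw_link { uint32_t len, p, next; };

/* Fill one chain of hw links from a scatterlist, starting at 'start',
 * terminating the last entry with next = 0 exactly as the in_sg and
 * out_sg loops above do. Returns the next free link index, which the
 * caller feeds back in as 'start' for the second chain. */
static int chain_links(struct hw_link *link, const uint32_t *phys_link,
                       struct fake_sg *sg, int start, int nents)
{
    int i;

    for (i = start; i < start + nents; i++) {
        link[i].len = sg->length;
        link[i].p = sg->dma_address;
        link[i].next = (i == start + nents - 1) ? 0 : phys_link[i + 1];
        sg = sg->next;
    }
    return i;
}
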
579 struct sahara_dev *dev = dev_ptr; in sahara_aes_process() local
586 dev_dbg(dev->device, in sahara_aes_process()
591 dev->total = req->nbytes; in sahara_aes_process()
592 dev->in_sg = req->src; in sahara_aes_process()
593 dev->out_sg = req->dst; in sahara_aes_process()
598 dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode; in sahara_aes_process()
600 if ((dev->flags & FLAGS_CBC) && req->info) in sahara_aes_process()
601 memcpy(dev->iv_base, req->info, AES_BLOCK_SIZE); in sahara_aes_process()
604 dev->ctx = ctx; in sahara_aes_process()
606 reinit_completion(&dev->dma_completion); in sahara_aes_process()
608 ret = sahara_hw_descriptor_create(dev); in sahara_aes_process()
612 timeout = wait_for_completion_timeout(&dev->dma_completion, in sahara_aes_process()
615 dev_err(dev->device, "AES timeout\n"); in sahara_aes_process()
619 dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_aes_process()
621 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
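
The process path is kick-and-wait: reinitialise the completion, build and launch the descriptors (the DAR write inside sahara_hw_descriptor_create() starts the engine), then block with a timeout until the IRQ handler signals done. A pthread-based stand-in for that completion, with all names invented, behaves the same way:

#include <pthread.h>
#include <stdbool.h>
#include <time.h>

/* Minimal completion model: the "IRQ" side sets done and signals,
 * the process side waits with a deadline. */
struct fake_completion {
    pthread_mutex_t lock;
    pthread_cond_t cond;
    bool done;
};

static int wait_done(struct fake_completion *c, int timeout_s)
{
    struct timespec ts;
    int ret = 0;

    clock_gettime(CLOCK_REALTIME, &ts);
    ts.tv_sec += timeout_s;

    pthread_mutex_lock(&c->lock);
    while (!c->done && ret == 0)
        ret = pthread_cond_timedwait(&c->cond, &c->lock, &ts);
    c->done = false;    /* ~ reinit_completion() before the next request */
    pthread_mutex_unlock(&c->lock);
    return ret;         /* ETIMEDOUT maps to the "AES timeout" branch */
}
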
667 struct sahara_dev *dev = dev_ptr; in sahara_aes_crypt() local
670 dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n", in sahara_aes_crypt()
674 dev_err(dev->device, in sahara_aes_crypt()
681 mutex_lock(&dev->queue_mutex); in sahara_aes_crypt()
682 err = ablkcipher_enqueue_request(&dev->queue, req); in sahara_aes_crypt()
683 mutex_unlock(&dev->queue_mutex); in sahara_aes_crypt()
685 wake_up_process(dev->kthread); in sahara_aes_crypt()
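
Enqueueing takes the queue mutex, adds the request, then wakes the worker kthread; the consumer side appears below in sahara_queue_manage(), which dequeues under the same mutex. A userspace producer sketch of that pattern (types invented; the driver itself uses ablkcipher_enqueue_request() and wake_up_process()):

#include <pthread.h>
#include <stddef.h>

struct req { struct req *next; };

struct workqueue {
    pthread_mutex_t lock;
    pthread_cond_t wake;    /* ~ wake_up_process(dev->kthread) */
    struct req *head;
};

static void enqueue(struct workqueue *q, struct req *r)
{
    pthread_mutex_lock(&q->lock);
    r->next = q->head;      /* the driver uses a proper FIFO crypto_queue */
    q->head = r;
    pthread_mutex_unlock(&q->lock);
    pthread_cond_signal(&q->wake);
}
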
788 static u32 sahara_sha_init_hdr(struct sahara_dev *dev, in sahara_sha_init_hdr() argument
811 static int sahara_sha_hw_links_create(struct sahara_dev *dev, in sahara_sha_hw_links_create() argument
819 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
821 dev->nb_in_sg = sahara_sg_length(dev->in_sg, rctx->total); in sahara_sha_hw_links_create()
822 if (dev->nb_in_sg > SAHARA_MAX_HW_LINK) { in sahara_sha_hw_links_create()
823 dev_err(dev->device, "not enough hw links (%d)\n", in sahara_sha_hw_links_create()
824 dev->nb_in_sg); in sahara_sha_hw_links_create()
830 sg = dev->in_sg; in sahara_sha_hw_links_create()
832 ret = dma_map_sg(dev->device, sg, 1, in sahara_sha_hw_links_create()
837 dev->hw_link[i]->len = sg->length; in sahara_sha_hw_links_create()
838 dev->hw_link[i]->p = sg->dma_address; in sahara_sha_hw_links_create()
839 dev->hw_link[i]->next = dev->hw_phys_link[i + 1]; in sahara_sha_hw_links_create()
843 dev->hw_link[i - 1]->next = 0; in sahara_sha_hw_links_create()
845 sg = dev->in_sg; in sahara_sha_hw_links_create()
846 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_sha_hw_links_create()
851 for (i = start; i < dev->nb_in_sg + start; i++) { in sahara_sha_hw_links_create()
852 dev->hw_link[i]->len = sg->length; in sahara_sha_hw_links_create()
853 dev->hw_link[i]->p = sg->dma_address; in sahara_sha_hw_links_create()
854 if (i == (dev->nb_in_sg + start - 1)) { in sahara_sha_hw_links_create()
855 dev->hw_link[i]->next = 0; in sahara_sha_hw_links_create()
857 dev->hw_link[i]->next = dev->hw_phys_link[i + 1]; in sahara_sha_hw_links_create()
866 static int sahara_sha_hw_data_descriptor_create(struct sahara_dev *dev, in sahara_sha_hw_data_descriptor_create() argument
876 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); in sahara_sha_hw_data_descriptor_create()
879 dev->hw_desc[index]->hdr = SAHARA_HDR_MDHA_HASH; in sahara_sha_hw_data_descriptor_create()
881 dev->hw_desc[index]->len1 = rctx->total; in sahara_sha_hw_data_descriptor_create()
882 if (dev->hw_desc[index]->len1 == 0) { in sahara_sha_hw_data_descriptor_create()
884 dev->hw_desc[index]->p1 = 0; in sahara_sha_hw_data_descriptor_create()
888 dev->hw_desc[index]->p1 = dev->hw_phys_link[index]; in sahara_sha_hw_data_descriptor_create()
889 i = sahara_sha_hw_links_create(dev, rctx, index); in sahara_sha_hw_data_descriptor_create()
896 dev->hw_desc[index]->p2 = dev->hw_phys_link[i]; in sahara_sha_hw_data_descriptor_create()
900 dev->hw_link[i]->p = dev->context_phys_base; in sahara_sha_hw_data_descriptor_create()
902 dev->hw_link[i]->len = result_len; in sahara_sha_hw_data_descriptor_create()
903 dev->hw_desc[index]->len2 = result_len; in sahara_sha_hw_data_descriptor_create()
905 dev->hw_link[i]->next = 0; in sahara_sha_hw_data_descriptor_create()
919 static int sahara_sha_hw_context_descriptor_create(struct sahara_dev *dev, in sahara_sha_hw_context_descriptor_create() argument
924 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); in sahara_sha_hw_context_descriptor_create()
926 dev->hw_desc[index]->len1 = rctx->context_size; in sahara_sha_hw_context_descriptor_create()
927 dev->hw_desc[index]->p1 = dev->hw_phys_link[index]; in sahara_sha_hw_context_descriptor_create()
928 dev->hw_desc[index]->len2 = 0; in sahara_sha_hw_context_descriptor_create()
929 dev->hw_desc[index]->p2 = 0; in sahara_sha_hw_context_descriptor_create()
931 dev->hw_link[index]->len = rctx->context_size; in sahara_sha_hw_context_descriptor_create()
932 dev->hw_link[index]->p = dev->context_phys_base; in sahara_sha_hw_context_descriptor_create()
933 dev->hw_link[index]->next = 0; in sahara_sha_hw_context_descriptor_create()
1033 static void sahara_sha_unmap_sg(struct sahara_dev *dev, in sahara_sha_unmap_sg() argument
1039 sg = dev->in_sg; in sahara_sha_unmap_sg()
1041 dma_unmap_sg(dev->device, sg, 1, DMA_TO_DEVICE); in sahara_sha_unmap_sg()
1045 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_sha_unmap_sg()
1052 struct sahara_dev *dev = dev_ptr; in sahara_sha_process() local
1062 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 0); in sahara_sha_process()
1063 dev->hw_desc[0]->next = 0; in sahara_sha_process()
1066 memcpy(dev->context_base, rctx->context, rctx->context_size); in sahara_sha_process()
1068 sahara_sha_hw_context_descriptor_create(dev, rctx, req, 0); in sahara_sha_process()
1069 dev->hw_desc[0]->next = dev->hw_phys_desc[1]; in sahara_sha_process()
1070 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 1); in sahara_sha_process()
1071 dev->hw_desc[1]->next = 0; in sahara_sha_process()
1074 sahara_dump_descriptors(dev); in sahara_sha_process()
1075 sahara_dump_links(dev); in sahara_sha_process()
1077 reinit_completion(&dev->dma_completion); in sahara_sha_process()
1079 sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR); in sahara_sha_process()
1081 timeout = wait_for_completion_timeout(&dev->dma_completion, in sahara_sha_process()
1084 dev_err(dev->device, "SHA timeout\n"); in sahara_sha_process()
1089 sahara_sha_unmap_sg(dev, rctx); in sahara_sha_process()
1091 memcpy(rctx->context, dev->context_base, rctx->context_size); in sahara_sha_process()
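
The SHA path builds either one descriptor (first update: hash the data only) or two (later updates: restore the saved context, then hash), and the choice is expressed purely through the next pointers. A compact model of that chaining, with faked physical descriptor addresses:

#include <stdint.h>

struct hw_desc { uint32_t hdr, len1, p1, len2, p2, next; };

/* first != 0: single data descriptor. Otherwise descriptor 0 reloads
 * the context and chains to descriptor 1, which hashes the data. */
static void chain_for_update(struct hw_desc *desc, const uint32_t *phys_desc,
                             int first)
{
    if (first) {
        desc[0].next = 0;
    } else {
        desc[0].next = phys_desc[1];
        desc[1].next = 0;
    }
}
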
1101 struct sahara_dev *dev = (struct sahara_dev *)data; in sahara_queue_manage() local
1109 mutex_lock(&dev->queue_mutex); in sahara_queue_manage()
1110 backlog = crypto_get_backlog(&dev->queue); in sahara_queue_manage()
1111 async_req = crypto_dequeue_request(&dev->queue); in sahara_queue_manage()
1112 mutex_unlock(&dev->queue_mutex); in sahara_queue_manage()
1145 struct sahara_dev *dev = dev_ptr; in sahara_sha_enqueue() local
1159 mutex_lock(&dev->queue_mutex); in sahara_sha_enqueue()
1160 ret = crypto_enqueue_request(&dev->queue, &req->base); in sahara_sha_enqueue()
1161 mutex_unlock(&dev->queue_mutex); in sahara_sha_enqueue()
1163 wake_up_process(dev->kthread); in sahara_sha_enqueue()
1373 struct sahara_dev *dev = (struct sahara_dev *)data; in sahara_irq_handler() local
1374 unsigned int stat = sahara_read(dev, SAHARA_REG_STATUS); in sahara_irq_handler()
1375 unsigned int err = sahara_read(dev, SAHARA_REG_ERRSTATUS); in sahara_irq_handler()
1377 sahara_write(dev, SAHARA_CMD_CLEAR_INT | SAHARA_CMD_CLEAR_ERR, in sahara_irq_handler()
1380 sahara_decode_status(dev, stat); in sahara_irq_handler()
1385 dev->error = 0; in sahara_irq_handler()
1387 sahara_decode_error(dev, err); in sahara_irq_handler()
1388 dev->error = -EINVAL; in sahara_irq_handler()
1391 complete(&dev->dma_completion); in sahara_irq_handler()
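
The handler reads the status and error registers, acks the interrupt, records success or failure for the sleeping process path, and completes it. A flow model with invented offsets and bits (the real ones are the driver's SAHARA_REG_* and SAHARA_CMD_* defines):

#include <stdint.h>

enum { REG_STATUS = 0x00, REG_ERRSTATUS = 0x04, REG_CMD = 0x08 };
#define CMD_CLEAR_INT 0x100u
#define CMD_CLEAR_ERR 0x200u

struct irq_model {
    volatile uint32_t *regs;
    int error;
    int done;   /* stands in for dev->dma_completion */
};

static void fake_irq_handler(struct irq_model *dev)
{
    uint32_t stat = dev->regs[REG_STATUS / 4];
    uint32_t err  = dev->regs[REG_ERRSTATUS / 4];

    /* ack before completing so the next descriptor can interrupt */
    dev->regs[REG_CMD / 4] = CMD_CLEAR_INT | CMD_CLEAR_ERR;

    dev->error = err ? -1 : 0;  /* the driver records -EINVAL */
    dev->done = 1;              /* ~ complete(&dev->dma_completion) */
    (void)stat;                 /* only decoded for debug output */
}
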
1397 static int sahara_register_algs(struct sahara_dev *dev) in sahara_register_algs() argument
1415 if (dev->version > SAHARA_VERSION_3) in sahara_register_algs()
1439 static void sahara_unregister_algs(struct sahara_dev *dev) in sahara_unregister_algs() argument
1449 if (dev->version > SAHARA_VERSION_3) in sahara_unregister_algs()
1469 struct sahara_dev *dev; in sahara_probe() local
1476 dev = devm_kzalloc(&pdev->dev, sizeof(struct sahara_dev), GFP_KERNEL); in sahara_probe()
1477 if (dev == NULL) { in sahara_probe()
1478 dev_err(&pdev->dev, "unable to alloc data struct.\n"); in sahara_probe()
1482 dev->device = &pdev->dev; in sahara_probe()
1483 platform_set_drvdata(pdev, dev); in sahara_probe()
1487 dev->regs_base = devm_ioremap_resource(&pdev->dev, res); in sahara_probe()
1488 if (IS_ERR(dev->regs_base)) in sahara_probe()
1489 return PTR_ERR(dev->regs_base); in sahara_probe()
1494 dev_err(&pdev->dev, "failed to get irq resource\n"); in sahara_probe()
1498 err = devm_request_irq(&pdev->dev, irq, sahara_irq_handler, in sahara_probe()
1499 0, dev_name(&pdev->dev), dev); in sahara_probe()
1501 dev_err(&pdev->dev, "failed to request irq\n"); in sahara_probe()
1506 dev->clk_ipg = devm_clk_get(&pdev->dev, "ipg"); in sahara_probe()
1507 if (IS_ERR(dev->clk_ipg)) { in sahara_probe()
1508 dev_err(&pdev->dev, "Could not get ipg clock\n"); in sahara_probe()
1509 return PTR_ERR(dev->clk_ipg); in sahara_probe()
1512 dev->clk_ahb = devm_clk_get(&pdev->dev, "ahb"); in sahara_probe()
1513 if (IS_ERR(dev->clk_ahb)) { in sahara_probe()
1514 dev_err(&pdev->dev, "Could not get ahb clock\n"); in sahara_probe()
1515 return PTR_ERR(dev->clk_ahb); in sahara_probe()
1519 dev->hw_desc[0] = dma_alloc_coherent(&pdev->dev, in sahara_probe()
1521 &dev->hw_phys_desc[0], GFP_KERNEL); in sahara_probe()
1522 if (!dev->hw_desc[0]) { in sahara_probe()
1523 dev_err(&pdev->dev, "Could not allocate hw descriptors\n"); in sahara_probe()
1526 dev->hw_desc[1] = dev->hw_desc[0] + 1; in sahara_probe()
1527 dev->hw_phys_desc[1] = dev->hw_phys_desc[0] + in sahara_probe()
1531 dev->key_base = dma_alloc_coherent(&pdev->dev, 2 * AES_KEYSIZE_128, in sahara_probe()
1532 &dev->key_phys_base, GFP_KERNEL); in sahara_probe()
1533 if (!dev->key_base) { in sahara_probe()
1534 dev_err(&pdev->dev, "Could not allocate memory for key\n"); in sahara_probe()
1538 dev->iv_base = dev->key_base + AES_KEYSIZE_128; in sahara_probe()
1539 dev->iv_phys_base = dev->key_phys_base + AES_KEYSIZE_128; in sahara_probe()
1542 dev->context_base = dma_alloc_coherent(&pdev->dev, in sahara_probe()
1544 &dev->context_phys_base, GFP_KERNEL); in sahara_probe()
1545 if (!dev->context_base) { in sahara_probe()
1546 dev_err(&pdev->dev, "Could not allocate memory for MDHA context\n"); in sahara_probe()
1552 dev->hw_link[0] = dma_alloc_coherent(&pdev->dev, in sahara_probe()
1554 &dev->hw_phys_link[0], GFP_KERNEL); in sahara_probe()
1555 if (!dev->hw_link[0]) { in sahara_probe()
1556 dev_err(&pdev->dev, "Could not allocate hw links\n"); in sahara_probe()
1561 dev->hw_phys_link[i] = dev->hw_phys_link[i - 1] + in sahara_probe()
1563 dev->hw_link[i] = dev->hw_link[i - 1] + 1; in sahara_probe()
1566 crypto_init_queue(&dev->queue, SAHARA_QUEUE_LENGTH); in sahara_probe()
1568 spin_lock_init(&dev->lock); in sahara_probe()
1569 mutex_init(&dev->queue_mutex); in sahara_probe()
1571 dev_ptr = dev; in sahara_probe()
1573 dev->kthread = kthread_run(sahara_queue_manage, dev, "sahara_crypto"); in sahara_probe()
1574 if (IS_ERR(dev->kthread)) { in sahara_probe()
1575 err = PTR_ERR(dev->kthread); in sahara_probe()
1579 init_completion(&dev->dma_completion); in sahara_probe()
1581 clk_prepare_enable(dev->clk_ipg); in sahara_probe()
1582 clk_prepare_enable(dev->clk_ahb); in sahara_probe()
1584 version = sahara_read(dev, SAHARA_REG_VERSION); in sahara_probe()
1585 if (of_device_is_compatible(pdev->dev.of_node, "fsl,imx27-sahara")) { in sahara_probe()
1588 } else if (of_device_is_compatible(pdev->dev.of_node, in sahara_probe()
1595 dev_err(&pdev->dev, "SAHARA version %d not supported\n", in sahara_probe()
1600 dev->version = version; in sahara_probe()
1602 sahara_write(dev, SAHARA_CMD_RESET | SAHARA_CMD_MODE_BATCH, in sahara_probe()
1604 sahara_write(dev, SAHARA_CONTROL_SET_THROTTLE(0) | in sahara_probe()
1610 err = sahara_register_algs(dev); in sahara_probe()
1614 dev_info(&pdev->dev, "SAHARA version %d initialized\n", version); in sahara_probe()
1619 dma_free_coherent(&pdev->dev, in sahara_probe()
1621 dev->hw_link[0], dev->hw_phys_link[0]); in sahara_probe()
1622 clk_disable_unprepare(dev->clk_ipg); in sahara_probe()
1623 clk_disable_unprepare(dev->clk_ahb); in sahara_probe()
1624 kthread_stop(dev->kthread); in sahara_probe()
1627 dma_free_coherent(&pdev->dev, in sahara_probe()
1629 dev->key_base, dev->key_phys_base); in sahara_probe()
1630 dma_free_coherent(&pdev->dev, in sahara_probe()
1632 dev->context_base, dev->context_phys_base); in sahara_probe()
1634 dma_free_coherent(&pdev->dev, in sahara_probe()
1636 dev->hw_desc[0], dev->hw_phys_desc[0]); in sahara_probe()
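
probe() makes one coherent allocation per table and then carves it: hw_desc[1] is hw_desc[0] + 1 on the CPU side and one descriptor's size past hw_phys_desc[0] on the bus side, and iv_base is key_base + AES_KEYSIZE_128. A runnable model of that carving, with malloc() standing in for dma_alloc_coherent() and the pointer value faked as the bus address:

#include <stdint.h>
#include <stdlib.h>

struct hw_desc { uint32_t hdr, len1, p1, len2, p2, next; };

int main(void)
{
    struct hw_desc *desc0 = malloc(2 * sizeof(*desc0));
    uintptr_t phys0;

    if (!desc0)
        return 1;
    phys0 = (uintptr_t)desc0;   /* fake bus address */

    /* CPU and "physical" views advance in lockstep. */
    struct hw_desc *desc1 = desc0 + 1;
    uintptr_t phys1 = phys0 + sizeof(*desc0);

    (void)desc1; (void)phys1;
    free(desc0);
    return 0;
}
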
1643 struct sahara_dev *dev = platform_get_drvdata(pdev); in sahara_remove() local
1645 dma_free_coherent(&pdev->dev, in sahara_remove()
1647 dev->hw_link[0], dev->hw_phys_link[0]); in sahara_remove()
1648 dma_free_coherent(&pdev->dev, in sahara_remove()
1650 dev->key_base, dev->key_phys_base); in sahara_remove()
1651 dma_free_coherent(&pdev->dev, in sahara_remove()
1653 dev->hw_desc[0], dev->hw_phys_desc[0]); in sahara_remove()
1655 kthread_stop(dev->kthread); in sahara_remove()
1657 sahara_unregister_algs(dev); in sahara_remove()
1659 clk_disable_unprepare(dev->clk_ipg); in sahara_remove()
1660 clk_disable_unprepare(dev->clk_ahb); in sahara_remove()