Lines matching refs: dev, an identifier cross-reference for the Freescale SAHARA crypto accelerator driver (drivers/crypto/sahara.c). Each entry gives the source line number, the matching code, and the enclosing function; a trailing "argument" or "local" marks whether the referenced definition is a function parameter or a local variable.
240 static inline void sahara_write(struct sahara_dev *dev, u32 data, u32 reg) in sahara_write() argument
242 writel(data, dev->regs_base + reg); in sahara_write()
245 static inline unsigned int sahara_read(struct sahara_dev *dev, u32 reg) in sahara_read() argument
247 return readl(dev->regs_base + reg); in sahara_read()
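For orientation, the sahara_dev fields that these references touch can be reconstructed from the listing itself. A partial sketch follows; member types, ordering, and anything not visible in the matches are inferred from usage, not taken from the driver (struct sahara_hw_desc and struct sahara_hw_link are sketched after the dump helpers below):

struct sahara_dev {
	struct device		*device;	/* target of dev_err()/dev_dbg() */
	void __iomem		*regs_base;	/* MMIO base for sahara_read()/sahara_write() */
	struct clk		*clk_ipg;
	struct clk		*clk_ahb;
	unsigned int		version;	/* read from SAHARA_REG_VERSION at probe */
	int			error;		/* result recorded by the IRQ handler */

	struct sahara_ctx	*ctx;		/* transform context of the running request */
	unsigned long		flags;		/* FLAGS_CBC, FLAGS_ENCRYPT, FLAGS_MODE_MASK */

	/* DMA-coherent buffers carved out in sahara_probe() */
	struct sahara_hw_desc	*hw_desc[2];
	dma_addr_t		hw_phys_desc[2];
	struct sahara_hw_link	*hw_link[SAHARA_MAX_HW_LINK];
	dma_addr_t		hw_phys_link[SAHARA_MAX_HW_LINK];
	u8			*key_base;
	dma_addr_t		key_phys_base;
	u8			*iv_base;
	dma_addr_t		iv_phys_base;
	u8			*context_base;
	dma_addr_t		context_phys_base;

	/* state of the request currently on the hardware */
	struct scatterlist	*in_sg;
	struct scatterlist	*out_sg;
	int			nb_in_sg;
	int			nb_out_sg;
	size_t			total;

	/* software queue and its worker thread */
	struct crypto_queue	queue;
	struct mutex		queue_mutex;
	spinlock_t		lock;
	struct task_struct	*kthread;
	struct completion	dma_completion;
};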
250 static u32 sahara_aes_key_hdr(struct sahara_dev *dev) in sahara_aes_key_hdr() argument
256 if (dev->flags & FLAGS_CBC) { in sahara_aes_key_hdr()
261 if (dev->flags & FLAGS_ENCRYPT) { in sahara_aes_key_hdr()
269 static u32 sahara_aes_data_link_hdr(struct sahara_dev *dev) in sahara_aes_data_link_hdr() argument
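Both header builders assemble the 32-bit hdr word of a hardware descriptor from dev->flags. A minimal sketch of the key-header variant, assuming illustrative SAHARA_HDR_* constant names; only the two flag branches are visible in the listing, so the real bit layout (including any parity handling) is not shown here:

static u32 sahara_aes_key_hdr(struct sahara_dev *dev)
{
	/* base operation: load an AES key into the SKHA unit; the
	 * SAHARA_HDR_* names below are illustrative, not the driver's
	 * actual macros */
	u32 hdr = SAHARA_HDR_BASE | SAHARA_HDR_SKHA_ALG_AES |
		  SAHARA_HDR_FORM_KEY;

	if (dev->flags & FLAGS_CBC)
		hdr |= SAHARA_HDR_SKHA_MODE_CBC;	/* otherwise ECB */

	if (dev->flags & FLAGS_ENCRYPT)
		hdr |= SAHARA_HDR_SKHA_OP_ENC;		/* otherwise decrypt */

	return hdr;
}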
329 static void sahara_decode_error(struct sahara_dev *dev, unsigned int error) in sahara_decode_error() argument
334 dev_err(dev->device, "%s: Error Register = 0x%08x\n", __func__, error); in sahara_decode_error()
336 dev_err(dev->device, " - %s.\n", sahara_err_src[source]); in sahara_decode_error()
340 dev_err(dev->device, " * DMA read.\n"); in sahara_decode_error()
342 dev_err(dev->device, " * DMA write.\n"); in sahara_decode_error()
344 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
346 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
349 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
351 dev_err(dev->device, " * %s.\n", in sahara_decode_error()
354 dev_err(dev->device, "\n"); in sahara_decode_error()
359 static void sahara_decode_status(struct sahara_dev *dev, unsigned int status) in sahara_decode_status() argument
368 dev_dbg(dev->device, "%s: Status Register = 0x%08x\n", in sahara_decode_status()
371 dev_dbg(dev->device, " - State = %d:\n", state); in sahara_decode_status()
373 dev_dbg(dev->device, " * Descriptor completed. IRQ pending.\n"); in sahara_decode_status()
375 dev_dbg(dev->device, " * %s.\n", in sahara_decode_status()
379 dev_dbg(dev->device, " - DAR Full.\n"); in sahara_decode_status()
381 dev_dbg(dev->device, " - Error.\n"); in sahara_decode_status()
383 dev_dbg(dev->device, " - Secure.\n"); in sahara_decode_status()
385 dev_dbg(dev->device, " - Fail.\n"); in sahara_decode_status()
387 dev_dbg(dev->device, " - RNG Reseed Request.\n"); in sahara_decode_status()
389 dev_dbg(dev->device, " - RNG Active.\n"); in sahara_decode_status()
391 dev_dbg(dev->device, " - MDHA Active.\n"); in sahara_decode_status()
393 dev_dbg(dev->device, " - SKHA Active.\n"); in sahara_decode_status()
396 dev_dbg(dev->device, " - Batch Mode.\n"); in sahara_decode_status()
398 dev_dbg(dev->device, " - Decidated Mode.\n"); in sahara_decode_status()
400 dev_dbg(dev->device, " - Debug Mode.\n"); in sahara_decode_status()
402 dev_dbg(dev->device, " - Internal state = 0x%02x\n", in sahara_decode_status()
405 dev_dbg(dev->device, "Current DAR: 0x%08x\n", in sahara_decode_status()
406 sahara_read(dev, SAHARA_REG_CDAR)); in sahara_decode_status()
407 dev_dbg(dev->device, "Initial DAR: 0x%08x\n\n", in sahara_decode_status()
408 sahara_read(dev, SAHARA_REG_IDAR)); in sahara_decode_status()
411 static void sahara_dump_descriptors(struct sahara_dev *dev) in sahara_dump_descriptors() argument
419 dev_dbg(dev->device, "Descriptor (%d) (0x%08x):\n", in sahara_dump_descriptors()
420 i, dev->hw_phys_desc[i]); in sahara_dump_descriptors()
421 dev_dbg(dev->device, "\thdr = 0x%08x\n", dev->hw_desc[i]->hdr); in sahara_dump_descriptors()
422 dev_dbg(dev->device, "\tlen1 = %u\n", dev->hw_desc[i]->len1); in sahara_dump_descriptors()
423 dev_dbg(dev->device, "\tp1 = 0x%08x\n", dev->hw_desc[i]->p1); in sahara_dump_descriptors()
424 dev_dbg(dev->device, "\tlen2 = %u\n", dev->hw_desc[i]->len2); in sahara_dump_descriptors()
425 dev_dbg(dev->device, "\tp2 = 0x%08x\n", dev->hw_desc[i]->p2); in sahara_dump_descriptors()
426 dev_dbg(dev->device, "\tnext = 0x%08x\n", in sahara_dump_descriptors()
427 dev->hw_desc[i]->next); in sahara_dump_descriptors()
429 dev_dbg(dev->device, "\n"); in sahara_dump_descriptors()
432 static void sahara_dump_links(struct sahara_dev *dev) in sahara_dump_links() argument
440 dev_dbg(dev->device, "Link (%d) (0x%08x):\n", in sahara_dump_links()
441 i, dev->hw_phys_link[i]); in sahara_dump_links()
442 dev_dbg(dev->device, "\tlen = %u\n", dev->hw_link[i]->len); in sahara_dump_links()
443 dev_dbg(dev->device, "\tp = 0x%08x\n", dev->hw_link[i]->p); in sahara_dump_links()
444 dev_dbg(dev->device, "\tnext = 0x%08x\n", in sahara_dump_links()
445 dev->hw_link[i]->next); in sahara_dump_links()
447 dev_dbg(dev->device, "\n"); in sahara_dump_links()
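The two dump helpers pin down the shape of the hardware-visible structures: a descriptor carries a header word, two length/pointer pairs, and a physical next pointer; a link describes one buffer segment. A sketch with 32-bit members, as implied by the 0x%08x format specifiers (member names are taken from the dumps, the struct definitions themselves are reconstructions):

struct sahara_hw_desc {
	u32	hdr;	/* operation header, see sahara_aes_key_hdr() etc. */
	u32	len1;	/* length of the buffer or chain behind p1 */
	u32	p1;	/* physical address: buffer or first hw link */
	u32	len2;	/* length of the buffer or chain behind p2 */
	u32	p2;	/* physical address: buffer or first hw link */
	u32	next;	/* physical address of next descriptor, 0 ends the chain */
};

struct sahara_hw_link {
	u32	len;	/* length of this scatterlist segment */
	u32	p;	/* physical (DMA) address of the segment */
	u32	next;	/* physical address of next link, 0 ends the chain */
};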
450 static int sahara_hw_descriptor_create(struct sahara_dev *dev) in sahara_hw_descriptor_create() argument
452 struct sahara_ctx *ctx = dev->ctx; in sahara_hw_descriptor_create()
460 memcpy(dev->key_base, ctx->key, ctx->keylen); in sahara_hw_descriptor_create()
463 if (dev->flags & FLAGS_CBC) { in sahara_hw_descriptor_create()
464 dev->hw_desc[idx]->len1 = AES_BLOCK_SIZE; in sahara_hw_descriptor_create()
465 dev->hw_desc[idx]->p1 = dev->iv_phys_base; in sahara_hw_descriptor_create()
467 dev->hw_desc[idx]->len1 = 0; in sahara_hw_descriptor_create()
468 dev->hw_desc[idx]->p1 = 0; in sahara_hw_descriptor_create()
470 dev->hw_desc[idx]->len2 = ctx->keylen; in sahara_hw_descriptor_create()
471 dev->hw_desc[idx]->p2 = dev->key_phys_base; in sahara_hw_descriptor_create()
472 dev->hw_desc[idx]->next = dev->hw_phys_desc[1]; in sahara_hw_descriptor_create()
474 dev->hw_desc[idx]->hdr = sahara_aes_key_hdr(dev); in sahara_hw_descriptor_create()
479 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, dev->total); in sahara_hw_descriptor_create()
480 dev->nb_out_sg = sg_nents_for_len(dev->out_sg, dev->total); in sahara_hw_descriptor_create()
481 if ((dev->nb_in_sg + dev->nb_out_sg) > SAHARA_MAX_HW_LINK) { in sahara_hw_descriptor_create()
482 dev_err(dev->device, "not enough hw links (%d)\n", in sahara_hw_descriptor_create()
483 dev->nb_in_sg + dev->nb_out_sg); in sahara_hw_descriptor_create()
487 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
489 if (ret != dev->nb_in_sg) { in sahara_hw_descriptor_create()
490 dev_err(dev->device, "couldn't map in sg\n"); in sahara_hw_descriptor_create()
493 ret = dma_map_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_hw_descriptor_create()
495 if (ret != dev->nb_out_sg) { in sahara_hw_descriptor_create()
496 dev_err(dev->device, "couldn't map out sg\n"); in sahara_hw_descriptor_create()
501 dev->hw_desc[idx]->p1 = dev->hw_phys_link[0]; in sahara_hw_descriptor_create()
502 sg = dev->in_sg; in sahara_hw_descriptor_create()
503 for (i = 0; i < dev->nb_in_sg; i++) { in sahara_hw_descriptor_create()
504 dev->hw_link[i]->len = sg->length; in sahara_hw_descriptor_create()
505 dev->hw_link[i]->p = sg->dma_address; in sahara_hw_descriptor_create()
506 if (i == (dev->nb_in_sg - 1)) { in sahara_hw_descriptor_create()
507 dev->hw_link[i]->next = 0; in sahara_hw_descriptor_create()
509 dev->hw_link[i]->next = dev->hw_phys_link[i + 1]; in sahara_hw_descriptor_create()
515 dev->hw_desc[idx]->p2 = dev->hw_phys_link[i]; in sahara_hw_descriptor_create()
516 sg = dev->out_sg; in sahara_hw_descriptor_create()
517 for (j = i; j < dev->nb_out_sg + i; j++) { in sahara_hw_descriptor_create()
518 dev->hw_link[j]->len = sg->length; in sahara_hw_descriptor_create()
519 dev->hw_link[j]->p = sg->dma_address; in sahara_hw_descriptor_create()
520 if (j == (dev->nb_out_sg + i - 1)) { in sahara_hw_descriptor_create()
521 dev->hw_link[j]->next = 0; in sahara_hw_descriptor_create()
523 dev->hw_link[j]->next = dev->hw_phys_link[j + 1]; in sahara_hw_descriptor_create()
529 dev->hw_desc[idx]->hdr = sahara_aes_data_link_hdr(dev); in sahara_hw_descriptor_create()
530 dev->hw_desc[idx]->len1 = dev->total; in sahara_hw_descriptor_create()
531 dev->hw_desc[idx]->len2 = dev->total; in sahara_hw_descriptor_create()
532 dev->hw_desc[idx]->next = 0; in sahara_hw_descriptor_create()
534 sahara_dump_descriptors(dev); in sahara_hw_descriptor_create()
535 sahara_dump_links(dev); in sahara_hw_descriptor_create()
537 sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR); in sahara_hw_descriptor_create()
542 dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_hw_descriptor_create()
545 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_hw_descriptor_create()
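The loop that turns a mapped scatterlist into a chain of hardware links is written out three times in the file (AES input, AES output, SHA input). A factored sketch of that pattern; the helper name is mine, and it assumes the scatterlist was already mapped with dma_map_sg():

static void sahara_fill_hw_links(struct sahara_dev *dev,
				 struct scatterlist *sg,
				 int first, int count)
{
	int i;

	for (i = first; i < first + count; i++) {
		dev->hw_link[i]->len = sg->length;
		dev->hw_link[i]->p = sg->dma_address;
		/* terminate the chain at the last segment */
		dev->hw_link[i]->next = (i == first + count - 1) ?
					0 : dev->hw_phys_link[i + 1];
		sg = sg_next(sg);
	}
}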
553 struct sahara_dev *dev = dev_ptr; in sahara_aes_process() local
560 dev_dbg(dev->device, in sahara_aes_process()
565 dev->total = req->nbytes; in sahara_aes_process()
566 dev->in_sg = req->src; in sahara_aes_process()
567 dev->out_sg = req->dst; in sahara_aes_process()
572 dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode; in sahara_aes_process()
574 if ((dev->flags & FLAGS_CBC) && req->info) in sahara_aes_process()
575 memcpy(dev->iv_base, req->info, AES_BLOCK_SIZE); in sahara_aes_process()
578 dev->ctx = ctx; in sahara_aes_process()
580 reinit_completion(&dev->dma_completion); in sahara_aes_process()
582 ret = sahara_hw_descriptor_create(dev); in sahara_aes_process()
586 timeout = wait_for_completion_timeout(&dev->dma_completion, in sahara_aes_process()
589 dev_err(dev->device, "AES timeout\n"); in sahara_aes_process()
593 dma_unmap_sg(dev->device, dev->out_sg, dev->nb_out_sg, in sahara_aes_process()
595 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_aes_process()
641 struct sahara_dev *dev = dev_ptr; in sahara_aes_crypt() local
644 dev_dbg(dev->device, "nbytes: %d, enc: %d, cbc: %d\n", in sahara_aes_crypt()
648 dev_err(dev->device, in sahara_aes_crypt()
655 mutex_lock(&dev->queue_mutex); in sahara_aes_crypt()
656 err = ablkcipher_enqueue_request(&dev->queue, req); in sahara_aes_crypt()
657 mutex_unlock(&dev->queue_mutex); in sahara_aes_crypt()
659 wake_up_process(dev->kthread); in sahara_aes_crypt()
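sahara_aes_crypt() is the common enqueue path; its second parameter (the mode mask stored in the request context) is inferred from the FLAGS_* usage above. The per-mode ablkcipher entry points are then presumably thin wrappers along these lines (names and signatures are assumptions following the driver's pattern):

static int sahara_aes_ecb_encrypt(struct ablkcipher_request *req)
{
	return sahara_aes_crypt(req, FLAGS_ENCRYPT);
}

static int sahara_aes_ecb_decrypt(struct ablkcipher_request *req)
{
	return sahara_aes_crypt(req, 0);
}

static int sahara_aes_cbc_encrypt(struct ablkcipher_request *req)
{
	return sahara_aes_crypt(req, FLAGS_ENCRYPT | FLAGS_CBC);
}

static int sahara_aes_cbc_decrypt(struct ablkcipher_request *req)
{
	return sahara_aes_crypt(req, FLAGS_CBC);
}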
762 static u32 sahara_sha_init_hdr(struct sahara_dev *dev, in sahara_sha_init_hdr() argument
785 static int sahara_sha_hw_links_create(struct sahara_dev *dev, in sahara_sha_hw_links_create() argument
793 dev->in_sg = rctx->in_sg; in sahara_sha_hw_links_create()
795 dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total); in sahara_sha_hw_links_create()
796 if ((dev->nb_in_sg) > SAHARA_MAX_HW_LINK) { in sahara_sha_hw_links_create()
797 dev_err(dev->device, "not enough hw links (%d)\n", in sahara_sha_hw_links_create()
798 dev->nb_in_sg); in sahara_sha_hw_links_create()
802 sg = dev->in_sg; in sahara_sha_hw_links_create()
803 ret = dma_map_sg(dev->device, dev->in_sg, dev->nb_in_sg, DMA_TO_DEVICE); in sahara_sha_hw_links_create()
807 for (i = start; i < dev->nb_in_sg + start; i++) { in sahara_sha_hw_links_create()
808 dev->hw_link[i]->len = sg->length; in sahara_sha_hw_links_create()
809 dev->hw_link[i]->p = sg->dma_address; in sahara_sha_hw_links_create()
810 if (i == (dev->nb_in_sg + start - 1)) { in sahara_sha_hw_links_create()
811 dev->hw_link[i]->next = 0; in sahara_sha_hw_links_create()
813 dev->hw_link[i]->next = dev->hw_phys_link[i + 1]; in sahara_sha_hw_links_create()
821 static int sahara_sha_hw_data_descriptor_create(struct sahara_dev *dev, in sahara_sha_hw_data_descriptor_create() argument
831 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); in sahara_sha_hw_data_descriptor_create()
834 dev->hw_desc[index]->hdr = SAHARA_HDR_MDHA_HASH; in sahara_sha_hw_data_descriptor_create()
836 dev->hw_desc[index]->len1 = rctx->total; in sahara_sha_hw_data_descriptor_create()
837 if (dev->hw_desc[index]->len1 == 0) { in sahara_sha_hw_data_descriptor_create()
839 dev->hw_desc[index]->p1 = 0; in sahara_sha_hw_data_descriptor_create()
843 dev->hw_desc[index]->p1 = dev->hw_phys_link[index]; in sahara_sha_hw_data_descriptor_create()
844 i = sahara_sha_hw_links_create(dev, rctx, index); in sahara_sha_hw_data_descriptor_create()
851 dev->hw_desc[index]->p2 = dev->hw_phys_link[i]; in sahara_sha_hw_data_descriptor_create()
855 dev->hw_link[i]->p = dev->context_phys_base; in sahara_sha_hw_data_descriptor_create()
857 dev->hw_link[i]->len = result_len; in sahara_sha_hw_data_descriptor_create()
858 dev->hw_desc[index]->len2 = result_len; in sahara_sha_hw_data_descriptor_create()
860 dev->hw_link[i]->next = 0; in sahara_sha_hw_data_descriptor_create()
874 static int sahara_sha_hw_context_descriptor_create(struct sahara_dev *dev, in sahara_sha_hw_context_descriptor_create() argument
879 dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx); in sahara_sha_hw_context_descriptor_create()
881 dev->hw_desc[index]->len1 = rctx->context_size; in sahara_sha_hw_context_descriptor_create()
882 dev->hw_desc[index]->p1 = dev->hw_phys_link[index]; in sahara_sha_hw_context_descriptor_create()
883 dev->hw_desc[index]->len2 = 0; in sahara_sha_hw_context_descriptor_create()
884 dev->hw_desc[index]->p2 = 0; in sahara_sha_hw_context_descriptor_create()
886 dev->hw_link[index]->len = rctx->context_size; in sahara_sha_hw_context_descriptor_create()
887 dev->hw_link[index]->p = dev->context_phys_base; in sahara_sha_hw_context_descriptor_create()
888 dev->hw_link[index]->next = 0; in sahara_sha_hw_context_descriptor_create()
987 struct sahara_dev *dev = dev_ptr; in sahara_sha_process() local
997 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 0); in sahara_sha_process()
998 dev->hw_desc[0]->next = 0; in sahara_sha_process()
1001 memcpy(dev->context_base, rctx->context, rctx->context_size); in sahara_sha_process()
1003 sahara_sha_hw_context_descriptor_create(dev, rctx, req, 0); in sahara_sha_process()
1004 dev->hw_desc[0]->next = dev->hw_phys_desc[1]; in sahara_sha_process()
1005 sahara_sha_hw_data_descriptor_create(dev, rctx, req, 1); in sahara_sha_process()
1006 dev->hw_desc[1]->next = 0; in sahara_sha_process()
1009 sahara_dump_descriptors(dev); in sahara_sha_process()
1010 sahara_dump_links(dev); in sahara_sha_process()
1012 reinit_completion(&dev->dma_completion); in sahara_sha_process()
1014 sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR); in sahara_sha_process()
1016 timeout = wait_for_completion_timeout(&dev->dma_completion, in sahara_sha_process()
1019 dev_err(dev->device, "SHA timeout\n"); in sahara_sha_process()
1024 dma_unmap_sg(dev->device, dev->in_sg, dev->nb_in_sg, in sahara_sha_process()
1027 memcpy(rctx->context, dev->context_base, rctx->context_size); in sahara_sha_process()
1037 struct sahara_dev *dev = (struct sahara_dev *)data; in sahara_queue_manage() local
1045 mutex_lock(&dev->queue_mutex); in sahara_queue_manage()
1046 backlog = crypto_get_backlog(&dev->queue); in sahara_queue_manage()
1047 async_req = crypto_dequeue_request(&dev->queue); in sahara_queue_manage()
1048 mutex_unlock(&dev->queue_mutex); in sahara_queue_manage()
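Only the locked dequeue of sahara_queue_manage() matches refs:dev, but the surrounding worker can be sketched as the usual kthread pattern: hand backlogged requests their -EINPROGRESS notification, dispatch by transform type, and sleep when the queue is empty. The dispatch details below are assumptions:

static int sahara_queue_manage(void *data)
{
	struct sahara_dev *dev = (struct sahara_dev *)data;
	struct crypto_async_request *async_req;
	struct crypto_async_request *backlog;
	int ret = 0;

	do {
		__set_current_state(TASK_INTERRUPTIBLE);

		mutex_lock(&dev->queue_mutex);
		backlog = crypto_get_backlog(&dev->queue);
		async_req = crypto_dequeue_request(&dev->queue);
		mutex_unlock(&dev->queue_mutex);

		if (backlog)
			backlog->complete(backlog, -EINPROGRESS);

		if (async_req) {
			/* hash vs. cipher dispatch (assumed) */
			if (crypto_tfm_alg_type(async_req->tfm) ==
			    CRYPTO_ALG_TYPE_AHASH)
				ret = sahara_sha_process(
					ahash_request_cast(async_req));
			else
				ret = sahara_aes_process(
					ablkcipher_request_cast(async_req));

			async_req->complete(async_req, ret);
			continue;
		}

		schedule();
	} while (!kthread_should_stop());

	return 0;
}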
1081 struct sahara_dev *dev = dev_ptr; in sahara_sha_enqueue() local
1095 mutex_lock(&dev->queue_mutex); in sahara_sha_enqueue()
1096 ret = crypto_enqueue_request(&dev->queue, &req->base); in sahara_sha_enqueue()
1097 mutex_unlock(&dev->queue_mutex); in sahara_sha_enqueue()
1099 wake_up_process(dev->kthread); in sahara_sha_enqueue()
1309 struct sahara_dev *dev = (struct sahara_dev *)data; in sahara_irq_handler() local
1310 unsigned int stat = sahara_read(dev, SAHARA_REG_STATUS); in sahara_irq_handler()
1311 unsigned int err = sahara_read(dev, SAHARA_REG_ERRSTATUS); in sahara_irq_handler()
1313 sahara_write(dev, SAHARA_CMD_CLEAR_INT | SAHARA_CMD_CLEAR_ERR, in sahara_irq_handler()
1316 sahara_decode_status(dev, stat); in sahara_irq_handler()
1321 dev->error = 0; in sahara_irq_handler()
1323 sahara_decode_error(dev, err); in sahara_irq_handler()
1324 dev->error = -EINVAL; in sahara_irq_handler()
1327 complete(&dev->dma_completion); in sahara_irq_handler()
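The handler pairs with the synchronous waiters in sahara_aes_process() and sahara_sha_process(): the worker writes the first descriptor's physical address to DAR and sleeps on dma_completion; the IRQ records 0 or -EINVAL in dev->error and completes. A condensed sketch of the waiter side, with the helper name and SAHARA_TIMEOUT_MS being my own:

static int sahara_run_and_wait(struct sahara_dev *dev, const char *what)
{
	unsigned long timeout;

	reinit_completion(&dev->dma_completion);

	/* kick the engine: point the Descriptor Address Register at
	 * the head of the chain */
	sahara_write(dev, dev->hw_phys_desc[0], SAHARA_REG_DAR);

	timeout = wait_for_completion_timeout(&dev->dma_completion,
					      msecs_to_jiffies(SAHARA_TIMEOUT_MS));
	if (!timeout) {
		dev_err(dev->device, "%s timeout\n", what);
		return -ETIMEDOUT;
	}

	return dev->error;	/* set by sahara_irq_handler() */
}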
1333 static int sahara_register_algs(struct sahara_dev *dev) in sahara_register_algs() argument
1351 if (dev->version > SAHARA_VERSION_3) in sahara_register_algs()
1375 static void sahara_unregister_algs(struct sahara_dev *dev) in sahara_unregister_algs() argument
1385 if (dev->version > SAHARA_VERSION_3) in sahara_unregister_algs()
1405 struct sahara_dev *dev; in sahara_probe() local
1412 dev = devm_kzalloc(&pdev->dev, sizeof(struct sahara_dev), GFP_KERNEL); in sahara_probe()
1413 if (dev == NULL) { in sahara_probe()
1414 dev_err(&pdev->dev, "unable to alloc data struct.\n"); in sahara_probe()
1418 dev->device = &pdev->dev; in sahara_probe()
1419 platform_set_drvdata(pdev, dev); in sahara_probe()
1423 dev->regs_base = devm_ioremap_resource(&pdev->dev, res); in sahara_probe()
1424 if (IS_ERR(dev->regs_base)) in sahara_probe()
1425 return PTR_ERR(dev->regs_base); in sahara_probe()
1430 dev_err(&pdev->dev, "failed to get irq resource\n"); in sahara_probe()
1434 err = devm_request_irq(&pdev->dev, irq, sahara_irq_handler, in sahara_probe()
1435 0, dev_name(&pdev->dev), dev); in sahara_probe()
1437 dev_err(&pdev->dev, "failed to request irq\n"); in sahara_probe()
1442 dev->clk_ipg = devm_clk_get(&pdev->dev, "ipg"); in sahara_probe()
1443 if (IS_ERR(dev->clk_ipg)) { in sahara_probe()
1444 dev_err(&pdev->dev, "Could not get ipg clock\n"); in sahara_probe()
1445 return PTR_ERR(dev->clk_ipg); in sahara_probe()
1448 dev->clk_ahb = devm_clk_get(&pdev->dev, "ahb"); in sahara_probe()
1449 if (IS_ERR(dev->clk_ahb)) { in sahara_probe()
1450 dev_err(&pdev->dev, "Could not get ahb clock\n"); in sahara_probe()
1451 return PTR_ERR(dev->clk_ahb); in sahara_probe()
1455 dev->hw_desc[0] = dmam_alloc_coherent(&pdev->dev, in sahara_probe()
1457 &dev->hw_phys_desc[0], GFP_KERNEL); in sahara_probe()
1458 if (!dev->hw_desc[0]) { in sahara_probe()
1459 dev_err(&pdev->dev, "Could not allocate hw descriptors\n"); in sahara_probe()
1462 dev->hw_desc[1] = dev->hw_desc[0] + 1; in sahara_probe()
1463 dev->hw_phys_desc[1] = dev->hw_phys_desc[0] + in sahara_probe()
1467 dev->key_base = dmam_alloc_coherent(&pdev->dev, 2 * AES_KEYSIZE_128, in sahara_probe()
1468 &dev->key_phys_base, GFP_KERNEL); in sahara_probe()
1469 if (!dev->key_base) { in sahara_probe()
1470 dev_err(&pdev->dev, "Could not allocate memory for key\n"); in sahara_probe()
1473 dev->iv_base = dev->key_base + AES_KEYSIZE_128; in sahara_probe()
1474 dev->iv_phys_base = dev->key_phys_base + AES_KEYSIZE_128; in sahara_probe()
1477 dev->context_base = dmam_alloc_coherent(&pdev->dev, in sahara_probe()
1479 &dev->context_phys_base, GFP_KERNEL); in sahara_probe()
1480 if (!dev->context_base) { in sahara_probe()
1481 dev_err(&pdev->dev, "Could not allocate memory for MDHA context\n"); in sahara_probe()
1486 dev->hw_link[0] = dmam_alloc_coherent(&pdev->dev, in sahara_probe()
1488 &dev->hw_phys_link[0], GFP_KERNEL); in sahara_probe()
1489 if (!dev->hw_link[0]) { in sahara_probe()
1490 dev_err(&pdev->dev, "Could not allocate hw links\n"); in sahara_probe()
1494 dev->hw_phys_link[i] = dev->hw_phys_link[i - 1] + in sahara_probe()
1496 dev->hw_link[i] = dev->hw_link[i - 1] + 1; in sahara_probe()
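The two assignments above are the body of a carve-up loop: probe allocates one coherent region for all hardware links (as it does for the two descriptors) and derives each virtual/physical pair from the previous one. The enclosing loop, reconstructed, with SAHARA_MAX_HW_LINK as the bound suggested by the rest of the listing:

for (i = 1; i < SAHARA_MAX_HW_LINK; i++) {
	dev->hw_phys_link[i] = dev->hw_phys_link[i - 1] +
			       sizeof(struct sahara_hw_link);
	dev->hw_link[i] = dev->hw_link[i - 1] + 1;
}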
1499 crypto_init_queue(&dev->queue, SAHARA_QUEUE_LENGTH); in sahara_probe()
1501 spin_lock_init(&dev->lock); in sahara_probe()
1502 mutex_init(&dev->queue_mutex); in sahara_probe()
1504 dev_ptr = dev; in sahara_probe()
1506 dev->kthread = kthread_run(sahara_queue_manage, dev, "sahara_crypto"); in sahara_probe()
1507 if (IS_ERR(dev->kthread)) { in sahara_probe()
1508 return PTR_ERR(dev->kthread); in sahara_probe()
1511 init_completion(&dev->dma_completion); in sahara_probe()
1513 err = clk_prepare_enable(dev->clk_ipg); in sahara_probe()
1516 err = clk_prepare_enable(dev->clk_ahb); in sahara_probe()
1520 version = sahara_read(dev, SAHARA_REG_VERSION); in sahara_probe()
1521 if (of_device_is_compatible(pdev->dev.of_node, "fsl,imx27-sahara")) { in sahara_probe()
1524 } else if (of_device_is_compatible(pdev->dev.of_node, in sahara_probe()
1531 dev_err(&pdev->dev, "SAHARA version %d not supported\n", in sahara_probe()
1536 dev->version = version; in sahara_probe()
1538 sahara_write(dev, SAHARA_CMD_RESET | SAHARA_CMD_MODE_BATCH, in sahara_probe()
1540 sahara_write(dev, SAHARA_CONTROL_SET_THROTTLE(0) | in sahara_probe()
1546 err = sahara_register_algs(dev); in sahara_probe()
1550 dev_info(&pdev->dev, "SAHARA version %d initialized\n", version); in sahara_probe()
1555 kthread_stop(dev->kthread); in sahara_probe()
1557 clk_disable_unprepare(dev->clk_ahb); in sahara_probe()
1559 clk_disable_unprepare(dev->clk_ipg); in sahara_probe()
1566 struct sahara_dev *dev = platform_get_drvdata(pdev); in sahara_remove() local
1568 kthread_stop(dev->kthread); in sahara_remove()
1570 sahara_unregister_algs(dev); in sahara_remove()
1572 clk_disable_unprepare(dev->clk_ipg); in sahara_remove()
1573 clk_disable_unprepare(dev->clk_ahb); in sahara_remove()