Lines Matching refs:dev
49 static void crypto4xx_hw_init(struct crypto4xx_device *dev) in crypto4xx_hw_init() argument
59 writel(PPC4XX_BYTE_ORDER, dev->ce_base + CRYPTO4XX_BYTE_ORDER_CFG); in crypto4xx_hw_init()
70 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
77 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
78 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_PDR_BASE); in crypto4xx_hw_init()
79 writel(dev->pdr_pa, dev->ce_base + CRYPTO4XX_RDR_BASE); in crypto4xx_hw_init()
80 writel(PPC4XX_PRNG_CTRL_AUTO_EN, dev->ce_base + CRYPTO4XX_PRNG_CTRL); in crypto4xx_hw_init()
82 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_L); in crypto4xx_hw_init()
84 writel(rand_num, dev->ce_base + CRYPTO4XX_PRNG_SEED_H); in crypto4xx_hw_init()
88 writel(ring_size.w, dev->ce_base + CRYPTO4XX_RING_SIZE); in crypto4xx_hw_init()
90 writel(ring_ctrl.w, dev->ce_base + CRYPTO4XX_RING_CTRL); in crypto4xx_hw_init()
91 device_ctrl = readl(dev->ce_base + CRYPTO4XX_DEVICE_CTRL); in crypto4xx_hw_init()
93 writel(device_ctrl, dev->ce_base + CRYPTO4XX_DEVICE_CTRL); in crypto4xx_hw_init()
94 writel(dev->gdr_pa, dev->ce_base + CRYPTO4XX_GATH_RING_BASE); in crypto4xx_hw_init()
95 writel(dev->sdr_pa, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE); in crypto4xx_hw_init()
99 writel(part_ring_size.w, dev->ce_base + CRYPTO4XX_PART_RING_SIZE); in crypto4xx_hw_init()
100 writel(PPC4XX_SD_BUFFER_SIZE, dev->ce_base + CRYPTO4XX_PART_RING_CFG); in crypto4xx_hw_init()
104 writel(io_threshold.w, dev->ce_base + CRYPTO4XX_IO_THRESHOLD); in crypto4xx_hw_init()
105 writel(0, dev->ce_base + CRYPTO4XX_PDR_BASE_UADDR); in crypto4xx_hw_init()
106 writel(0, dev->ce_base + CRYPTO4XX_RDR_BASE_UADDR); in crypto4xx_hw_init()
107 writel(0, dev->ce_base + CRYPTO4XX_PKT_SRC_UADDR); in crypto4xx_hw_init()
108 writel(0, dev->ce_base + CRYPTO4XX_PKT_DEST_UADDR); in crypto4xx_hw_init()
109 writel(0, dev->ce_base + CRYPTO4XX_SA_UADDR); in crypto4xx_hw_init()
110 writel(0, dev->ce_base + CRYPTO4XX_GATH_RING_BASE_UADDR); in crypto4xx_hw_init()
111 writel(0, dev->ce_base + CRYPTO4XX_SCAT_RING_BASE_UADDR); in crypto4xx_hw_init()
118 writel(pe_dma_cfg.w, dev->ce_base + CRYPTO4XX_PE_DMA_CFG); in crypto4xx_hw_init()
120 writel(PPC4XX_INTERRUPT_CLR, dev->ce_base + CRYPTO4XX_INT_CLR); in crypto4xx_hw_init()
121 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT); in crypto4xx_hw_init()
122 writel(PPC4XX_INT_DESCR_CNT, dev->ce_base + CRYPTO4XX_INT_DESCR_CNT); in crypto4xx_hw_init()
123 writel(PPC4XX_INT_CFG, dev->ce_base + CRYPTO4XX_INT_CFG); in crypto4xx_hw_init()
124 writel(PPC4XX_PD_DONE_INT, dev->ce_base + CRYPTO4XX_INT_EN); in crypto4xx_hw_init()
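All of the crypto4xx_hw_init() references above follow one MMIO pattern: the security engine is programmed by writel()-ing a value to the ioremapped dev->ce_base plus a register offset. A minimal kernel-style sketch of that pattern follows; the CRYPTO4XX_/PPC4XX_ macros are the ones seen in the listing, while hw_init_sketch() itself is illustrative and not the driver's function.

#include <linux/io.h>
#include <linux/types.h>

/*
 * Illustrative only: the writel(value, base + offset) pattern used
 * throughout crypto4xx_hw_init().  ce_base is the of_iomap()'ed register
 * window, pdr_pa the DMA address of the packet descriptor ring.
 */
static void hw_init_sketch(void __iomem *ce_base, dma_addr_t pdr_pa)
{
	/* as on lines 78-79, the PDR base is also written to the RDR base */
	writel(pdr_pa, ce_base + CRYPTO4XX_PDR_BASE);
	writel(pdr_pa, ce_base + CRYPTO4XX_RDR_BASE);

	/* clear stale interrupts, then enable the "descriptor done" source */
	writel(PPC4XX_INTERRUPT_CLR, ce_base + CRYPTO4XX_INT_CLR);
	writel(PPC4XX_PD_DONE_INT, ce_base + CRYPTO4XX_INT_EN);
}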
129 ctx->sa_in = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4, in crypto4xx_alloc_sa()
134 ctx->sa_out = dma_alloc_coherent(ctx->dev->core_dev->device, size * 4, in crypto4xx_alloc_sa()
137 dma_free_coherent(ctx->dev->core_dev->device, in crypto4xx_alloc_sa()
153 dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4, in crypto4xx_free_sa()
156 dma_free_coherent(ctx->dev->core_dev->device, ctx->sa_len * 4, in crypto4xx_free_sa()
166 ctx->state_record = dma_alloc_coherent(ctx->dev->core_dev->device, in crypto4xx_alloc_state_record()
179 dma_free_coherent(ctx->dev->core_dev->device, in crypto4xx_free_state_record()
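The crypto4xx_alloc_sa(), crypto4xx_free_sa() and state-record references show the usual coherent-DMA pairing: every dma_alloc_coherent() on ctx->dev->core_dev->device is matched by a dma_free_coherent() with the same device, size (sa_len * 4 bytes) and DMA handle. A hedged sketch of that pairing follows; struct sa_sketch and the function names are mine, not the driver's.

#include <linux/dma-mapping.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>

/* illustrative container; the real fields live in struct crypto4xx_ctx */
struct sa_sketch {
	void *sa_in;
	dma_addr_t sa_in_dma;
	u32 sa_len;		/* length in 32-bit words, hence the "* 4" */
};

static int sa_alloc_sketch(struct device *dev, struct sa_sketch *s, u32 size)
{
	s->sa_in = dma_alloc_coherent(dev, size * 4, &s->sa_in_dma, GFP_ATOMIC);
	if (!s->sa_in)
		return -ENOMEM;

	s->sa_len = size;
	return 0;
}

static void sa_free_sketch(struct device *dev, struct sa_sketch *s)
{
	dma_free_coherent(dev, s->sa_len * 4, s->sa_in, s->sa_in_dma);
	s->sa_in = NULL;
	s->sa_len = 0;
}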
191 static u32 crypto4xx_build_pdr(struct crypto4xx_device *dev) in crypto4xx_build_pdr() argument
195 dev->pdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
197 &dev->pdr_pa, GFP_ATOMIC); in crypto4xx_build_pdr()
198 if (!dev->pdr) in crypto4xx_build_pdr()
201 dev->pdr_uinfo = kzalloc(sizeof(struct pd_uinfo) * PPC4XX_NUM_PD, in crypto4xx_build_pdr()
203 if (!dev->pdr_uinfo) { in crypto4xx_build_pdr()
204 dma_free_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
206 dev->pdr, in crypto4xx_build_pdr()
207 dev->pdr_pa); in crypto4xx_build_pdr()
210 memset(dev->pdr, 0, sizeof(struct ce_pd) * PPC4XX_NUM_PD); in crypto4xx_build_pdr()
211 dev->shadow_sa_pool = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
213 &dev->shadow_sa_pool_pa, in crypto4xx_build_pdr()
215 if (!dev->shadow_sa_pool) in crypto4xx_build_pdr()
218 dev->shadow_sr_pool = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_pdr()
220 &dev->shadow_sr_pool_pa, GFP_ATOMIC); in crypto4xx_build_pdr()
221 if (!dev->shadow_sr_pool) in crypto4xx_build_pdr()
224 pd_uinfo = (struct pd_uinfo *) (dev->pdr_uinfo + in crypto4xx_build_pdr()
228 pd_uinfo->sa_va = dev->shadow_sa_pool + 256 * i; in crypto4xx_build_pdr()
229 pd_uinfo->sa_pa = dev->shadow_sa_pool_pa + 256 * i; in crypto4xx_build_pdr()
232 pd_uinfo->sr_va = dev->shadow_sr_pool + in crypto4xx_build_pdr()
234 pd_uinfo->sr_pa = dev->shadow_sr_pool_pa + in crypto4xx_build_pdr()
241 static void crypto4xx_destroy_pdr(struct crypto4xx_device *dev) in crypto4xx_destroy_pdr() argument
243 if (dev->pdr != NULL) in crypto4xx_destroy_pdr()
244 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_pdr()
246 dev->pdr, dev->pdr_pa); in crypto4xx_destroy_pdr()
247 if (dev->shadow_sa_pool) in crypto4xx_destroy_pdr()
248 dma_free_coherent(dev->core_dev->device, 256 * PPC4XX_NUM_PD, in crypto4xx_destroy_pdr()
249 dev->shadow_sa_pool, dev->shadow_sa_pool_pa); in crypto4xx_destroy_pdr()
250 if (dev->shadow_sr_pool) in crypto4xx_destroy_pdr()
251 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_pdr()
253 dev->shadow_sr_pool, dev->shadow_sr_pool_pa); in crypto4xx_destroy_pdr()
255 kfree(dev->pdr_uinfo); in crypto4xx_destroy_pdr()
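crypto4xx_build_pdr() allocates the PD ring, a kzalloc()'ed uinfo array, and two coherent shadow pools, then points each uinfo entry at its 256-byte slice of the shadow SA pool (lines 228-229); crypto4xx_destroy_pdr() frees the same regions with the same sizes. The slicing step is sketched below, assuming the pool base is a void * as in the driver; struct uinfo_sketch and the helper name are illustrative.

#include <linux/types.h>

/* illustrative stand-in for the relevant part of struct pd_uinfo */
struct uinfo_sketch {
	void *sa_va;
	dma_addr_t sa_pa;
};

/*
 * Carve one coherent pool of num_pd * 256 bytes into a 256-byte shadow SA
 * per packet descriptor, mirroring "sa_va = shadow_sa_pool + 256 * i".
 */
static void slice_shadow_pool(struct uinfo_sketch *uinfo, int num_pd,
			      void *pool_va, dma_addr_t pool_pa)
{
	int i;

	for (i = 0; i < num_pd; i++) {
		uinfo[i].sa_va = pool_va + 256 * i;
		uinfo[i].sa_pa = pool_pa + 256 * i;
	}
}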
258 static u32 crypto4xx_get_pd_from_pdr_nolock(struct crypto4xx_device *dev) in crypto4xx_get_pd_from_pdr_nolock() argument
263 retval = dev->pdr_head; in crypto4xx_get_pd_from_pdr_nolock()
264 tmp = (dev->pdr_head + 1) % PPC4XX_NUM_PD; in crypto4xx_get_pd_from_pdr_nolock()
266 if (tmp == dev->pdr_tail) in crypto4xx_get_pd_from_pdr_nolock()
269 dev->pdr_head = tmp; in crypto4xx_get_pd_from_pdr_nolock()
274 static u32 crypto4xx_put_pd_to_pdr(struct crypto4xx_device *dev, u32 idx) in crypto4xx_put_pd_to_pdr() argument
279 pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo + in crypto4xx_put_pd_to_pdr()
281 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_pd_to_pdr()
282 if (dev->pdr_tail != PPC4XX_LAST_PD) in crypto4xx_put_pd_to_pdr()
283 dev->pdr_tail++; in crypto4xx_put_pd_to_pdr()
285 dev->pdr_tail = 0; in crypto4xx_put_pd_to_pdr()
287 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_pd_to_pdr()
292 static struct ce_pd *crypto4xx_get_pdp(struct crypto4xx_device *dev, in crypto4xx_get_pdp() argument
295 *pd_dma = dev->pdr_pa + sizeof(struct ce_pd) * idx; in crypto4xx_get_pdp()
297 return dev->pdr + sizeof(struct ce_pd) * idx; in crypto4xx_get_pdp()
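The PDR references above are a classic head/tail ring: crypto4xx_get_pd_from_pdr_nolock() hands out the slot at pdr_head and advances the head modulo PPC4XX_NUM_PD, refusing when the next head would meet the tail, crypto4xx_put_pd_to_pdr() advances the tail under dev->core_dev->lock, and crypto4xx_get_pdp() turns an index into a descriptor pointer plus DMA address. A standalone model of the index arithmetic, runnable in user space; NUM_PD, ERING and the function names are stand-ins, and locking is omitted.

#include <stdio.h>

#define NUM_PD   256		/* stands in for PPC4XX_NUM_PD */
#define LAST_PD  (NUM_PD - 1)	/* stands in for PPC4XX_LAST_PD */
#define ERING    (~0u)		/* "ring full" marker, illustrative */

static unsigned int head, tail;

/* model of crypto4xx_get_pd_from_pdr_nolock(): reserve the slot at head */
static unsigned int get_pd(void)
{
	unsigned int retval = head;
	unsigned int next = (head + 1) % NUM_PD;

	if (next == tail)	/* full: the head may never catch the tail */
		return ERING;

	head = next;
	return retval;
}

/* model of crypto4xx_put_pd_to_pdr(): release the oldest slot at tail */
static void put_pd(void)
{
	if (tail != LAST_PD)
		tail++;
	else
		tail = 0;
}

int main(void)
{
	unsigned int reserved = 0;

	while (get_pd() != ERING)	/* exhaust the ring */
		reserved++;
	printf("reserved %u of %u slots\n", reserved, NUM_PD);

	put_pd();			/* releasing one slot makes room again */
	printf("after put, get_pd() = %u\n", get_pd());
	return 0;
}

One slot is deliberately left unused, so that pdr_head == pdr_tail can only mean "ring empty"; the tasklet loop at lines 1069-1079 relies on exactly that invariant.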
305 static u32 crypto4xx_build_gdr(struct crypto4xx_device *dev) in crypto4xx_build_gdr() argument
307 dev->gdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_gdr()
309 &dev->gdr_pa, GFP_ATOMIC); in crypto4xx_build_gdr()
310 if (!dev->gdr) in crypto4xx_build_gdr()
313 memset(dev->gdr, 0, sizeof(struct ce_gd) * PPC4XX_NUM_GD); in crypto4xx_build_gdr()
318 static inline void crypto4xx_destroy_gdr(struct crypto4xx_device *dev) in crypto4xx_destroy_gdr() argument
320 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_gdr()
322 dev->gdr, dev->gdr_pa); in crypto4xx_destroy_gdr()
329 u32 crypto4xx_get_n_gd(struct crypto4xx_device *dev, int n) in crypto4xx_get_n_gd() argument
336 retval = dev->gdr_head; in crypto4xx_get_n_gd()
337 tmp = (dev->gdr_head + n) % PPC4XX_NUM_GD; in crypto4xx_get_n_gd()
338 if (dev->gdr_head > dev->gdr_tail) { in crypto4xx_get_n_gd()
339 if (tmp < dev->gdr_head && tmp >= dev->gdr_tail) in crypto4xx_get_n_gd()
341 } else if (dev->gdr_head < dev->gdr_tail) { in crypto4xx_get_n_gd()
342 if (tmp < dev->gdr_head || tmp >= dev->gdr_tail) in crypto4xx_get_n_gd()
345 dev->gdr_head = tmp; in crypto4xx_get_n_gd()
350 static u32 crypto4xx_put_gd_to_gdr(struct crypto4xx_device *dev) in crypto4xx_put_gd_to_gdr() argument
354 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
355 if (dev->gdr_tail == dev->gdr_head) { in crypto4xx_put_gd_to_gdr()
356 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
360 if (dev->gdr_tail != PPC4XX_LAST_GD) in crypto4xx_put_gd_to_gdr()
361 dev->gdr_tail++; in crypto4xx_put_gd_to_gdr()
363 dev->gdr_tail = 0; in crypto4xx_put_gd_to_gdr()
365 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_gd_to_gdr()
370 static inline struct ce_gd *crypto4xx_get_gdp(struct crypto4xx_device *dev, in crypto4xx_get_gdp() argument
373 *gd_dma = dev->gdr_pa + sizeof(struct ce_gd) * idx; in crypto4xx_get_gdp()
375 return (struct ce_gd *) (dev->gdr + sizeof(struct ce_gd) * idx); in crypto4xx_get_gdp()
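crypto4xx_get_n_gd() reserves n gather descriptors in one go, so the full-ring test has to cover both the wrapped and unwrapped cases: the candidate new head (head + n) mod PPC4XX_NUM_GD must not land inside the still-occupied span between tail and head. A standalone, runnable model of that test; NUM_GD, EGDR and the names are stand-ins for the driver's macros.

#include <stdio.h>

#define NUM_GD 1024		/* stands in for PPC4XX_NUM_GD */
#define EGDR   (~0u)		/* "not enough room" marker, illustrative */

static unsigned int gdr_head, gdr_tail;

/*
 * Model of crypto4xx_get_n_gd(): reserve n consecutive gather descriptors
 * starting at the current head.  The reservation fails when advancing the
 * head by n would wrap it back into the still-occupied [tail, head) span.
 */
static unsigned int get_n_gd(int n)
{
	unsigned int retval = gdr_head;
	unsigned int tmp = (gdr_head + n) % NUM_GD;

	if (gdr_head > gdr_tail) {
		if (tmp < gdr_head && tmp >= gdr_tail)
			return EGDR;
	} else if (gdr_head < gdr_tail) {
		if (tmp < gdr_head || tmp >= gdr_tail)
			return EGDR;
	}
	/* gdr_head == gdr_tail is treated as empty, so no check is needed */

	gdr_head = tmp;
	return retval;
}

int main(void)
{
	printf("first reservation starts at %u\n", get_n_gd(1000));
	printf("second reservation %s\n",
	       get_n_gd(100) == EGDR ? "rejected (would wrap into tail)" : "ok");
	return 0;
}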
383 static u32 crypto4xx_build_sdr(struct crypto4xx_device *dev) in crypto4xx_build_sdr() argument
389 dev->sdr = dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_sdr()
391 &dev->sdr_pa, GFP_ATOMIC); in crypto4xx_build_sdr()
392 if (!dev->sdr) in crypto4xx_build_sdr()
395 dev->scatter_buffer_size = PPC4XX_SD_BUFFER_SIZE; in crypto4xx_build_sdr()
396 dev->scatter_buffer_va = in crypto4xx_build_sdr()
397 dma_alloc_coherent(dev->core_dev->device, in crypto4xx_build_sdr()
398 dev->scatter_buffer_size * PPC4XX_NUM_SD, in crypto4xx_build_sdr()
399 &dev->scatter_buffer_pa, GFP_ATOMIC); in crypto4xx_build_sdr()
400 if (!dev->scatter_buffer_va) { in crypto4xx_build_sdr()
401 dma_free_coherent(dev->core_dev->device, in crypto4xx_build_sdr()
403 dev->sdr, dev->sdr_pa); in crypto4xx_build_sdr()
407 sd_array = dev->sdr; in crypto4xx_build_sdr()
410 sd_array[i].ptr = dev->scatter_buffer_pa + in crypto4xx_build_sdr()
411 dev->scatter_buffer_size * i; in crypto4xx_build_sdr()
417 static void crypto4xx_destroy_sdr(struct crypto4xx_device *dev) in crypto4xx_destroy_sdr() argument
419 if (dev->sdr != NULL) in crypto4xx_destroy_sdr()
420 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_sdr()
422 dev->sdr, dev->sdr_pa); in crypto4xx_destroy_sdr()
424 if (dev->scatter_buffer_va != NULL) in crypto4xx_destroy_sdr()
425 dma_free_coherent(dev->core_dev->device, in crypto4xx_destroy_sdr()
426 dev->scatter_buffer_size * PPC4XX_NUM_SD, in crypto4xx_destroy_sdr()
427 dev->scatter_buffer_va, in crypto4xx_destroy_sdr()
428 dev->scatter_buffer_pa); in crypto4xx_destroy_sdr()
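crypto4xx_build_sdr() makes two coherent allocations, the scatter-descriptor ring itself and one contiguous buffer of PPC4XX_NUM_SD * PPC4XX_SD_BUFFER_SIZE bytes, and then points each descriptor at its slice of that buffer (lines 410-411); crypto4xx_destroy_sdr() frees both, guarded by NULL checks. The wiring step is sketched below; struct sd_sketch and the helper name are mine.

#include <linux/types.h>

/* illustrative stand-in for struct ce_sd: only the DMA pointer matters here */
struct sd_sketch {
	u32 ptr;
};

/*
 * Point every scatter descriptor at its buffer_size-byte slice of one
 * contiguous coherent buffer, as crypto4xx_build_sdr() does with
 * scatter_buffer_pa and PPC4XX_SD_BUFFER_SIZE.
 */
static void wire_scatter_buffers(struct sd_sketch *sd_array, int num_sd,
				 dma_addr_t buffer_pa, u32 buffer_size)
{
	int i;

	for (i = 0; i < num_sd; i++)
		sd_array[i].ptr = buffer_pa + buffer_size * i;
}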
435 static u32 crypto4xx_get_n_sd(struct crypto4xx_device *dev, int n) in crypto4xx_get_n_sd() argument
443 retval = dev->sdr_head; in crypto4xx_get_n_sd()
444 tmp = (dev->sdr_head + n) % PPC4XX_NUM_SD; in crypto4xx_get_n_sd()
445 if (dev->sdr_head > dev->sdr_tail) { in crypto4xx_get_n_sd()

446 if (tmp < dev->sdr_head && tmp >= dev->sdr_tail) in crypto4xx_get_n_sd()
448 } else if (dev->sdr_head < dev->sdr_tail) { in crypto4xx_get_n_sd()
449 if (tmp < dev->sdr_head || tmp >= dev->sdr_tail) in crypto4xx_get_n_sd()
452 dev->sdr_head = tmp; in crypto4xx_get_n_sd()
457 static u32 crypto4xx_put_sd_to_sdr(struct crypto4xx_device *dev) in crypto4xx_put_sd_to_sdr() argument
461 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
462 if (dev->sdr_tail == dev->sdr_head) { in crypto4xx_put_sd_to_sdr()
463 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
466 if (dev->sdr_tail != PPC4XX_LAST_SD) in crypto4xx_put_sd_to_sdr()
467 dev->sdr_tail++; in crypto4xx_put_sd_to_sdr()
469 dev->sdr_tail = 0; in crypto4xx_put_sd_to_sdr()
470 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_put_sd_to_sdr()
475 static inline struct ce_sd *crypto4xx_get_sdp(struct crypto4xx_device *dev, in crypto4xx_get_sdp() argument
478 *sd_dma = dev->sdr_pa + sizeof(struct ce_sd) * idx; in crypto4xx_get_sdp()
480 return (struct ce_sd *)(dev->sdr + sizeof(struct ce_sd) * idx); in crypto4xx_get_sdp()
483 static u32 crypto4xx_fill_one_page(struct crypto4xx_device *dev, in crypto4xx_fill_one_page() argument
489 if (*length > dev->scatter_buffer_size) { in crypto4xx_fill_one_page()
491 dev->scatter_buffer_va + in crypto4xx_fill_one_page()
492 *idx * dev->scatter_buffer_size + *offset, in crypto4xx_fill_one_page()
493 dev->scatter_buffer_size); in crypto4xx_fill_one_page()
495 *length -= dev->scatter_buffer_size; in crypto4xx_fill_one_page()
496 *nbytes -= dev->scatter_buffer_size; in crypto4xx_fill_one_page()
501 *addr = *addr + dev->scatter_buffer_size; in crypto4xx_fill_one_page()
503 } else if (*length < dev->scatter_buffer_size) { in crypto4xx_fill_one_page()
505 dev->scatter_buffer_va + in crypto4xx_fill_one_page()
506 *idx * dev->scatter_buffer_size + *offset, *length); in crypto4xx_fill_one_page()
507 if ((*offset + *length) == dev->scatter_buffer_size) { in crypto4xx_fill_one_page()
521 len = (*nbytes <= dev->scatter_buffer_size) ? in crypto4xx_fill_one_page()
522 (*nbytes) : dev->scatter_buffer_size; in crypto4xx_fill_one_page()
524 dev->scatter_buffer_va + in crypto4xx_fill_one_page()
525 *idx * dev->scatter_buffer_size + *offset, in crypto4xx_fill_one_page()
539 static void crypto4xx_copy_pkt_to_dst(struct crypto4xx_device *dev, in crypto4xx_copy_pkt_to_dst() argument
560 addr = dma_map_page(dev->core_dev->device, sg_page(sg), in crypto4xx_copy_pkt_to_dst()
565 while (crypto4xx_fill_one_page(dev, &addr, &len, in crypto4xx_copy_pkt_to_dst()
572 len = (nbytes <= (dev->scatter_buffer_size - offset)) ? in crypto4xx_copy_pkt_to_dst()
573 nbytes : (dev->scatter_buffer_size - offset); in crypto4xx_copy_pkt_to_dst()
575 while (crypto4xx_fill_one_page(dev, &addr, &len, in crypto4xx_copy_pkt_to_dst()
583 while (crypto4xx_fill_one_page(dev, &addr, in crypto4xx_copy_pkt_to_dst()
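crypto4xx_copy_pkt_to_dst() and crypto4xx_fill_one_page() drain the engine's fixed-size scatter buffers back into the destination scatterlist, copying at most scatter_buffer_size bytes per step and then moving on to the next buffer. Below is a simplified, runnable user-space model of that chunked copy; it deliberately ignores the dma_map_page()/sg handling and the per-page offset bookkeeping that the real code performs.

#include <stdio.h>
#include <string.h>

#define SD_BUFFER_SIZE 2048	/* stands in for PPC4XX_SD_BUFFER_SIZE */
#define NUM_SD 4

/*
 * Drain the fixed-size scatter buffers into a destination, at most one
 * buffer's worth per step, advancing the buffer index each time.
 */
static void copy_from_scatter(char *dst, char scatter[][SD_BUFFER_SIZE],
			      size_t nbytes)
{
	size_t idx = 0;

	while (nbytes) {
		size_t len = nbytes < SD_BUFFER_SIZE ? nbytes : SD_BUFFER_SIZE;

		memcpy(dst, scatter[idx], len);
		dst += len;
		nbytes -= len;
		idx++;
	}
}

int main(void)
{
	static char scatter[NUM_SD][SD_BUFFER_SIZE];
	static char dst[NUM_SD * SD_BUFFER_SIZE];

	memset(scatter, 'x', sizeof(scatter));
	copy_from_scatter(dst, scatter, 2 * SD_BUFFER_SIZE + 100);
	printf("dst[0]=%c dst[%d]=%c\n", dst[0], 2 * SD_BUFFER_SIZE + 99,
	       dst[2 * SD_BUFFER_SIZE + 99]);
	return 0;
}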
607 static void crypto4xx_ret_sg_desc(struct crypto4xx_device *dev, in crypto4xx_ret_sg_desc() argument
613 crypto4xx_put_gd_to_gdr(dev); in crypto4xx_ret_sg_desc()
619 crypto4xx_put_sd_to_sdr(dev); in crypto4xx_ret_sg_desc()
626 static u32 crypto4xx_ablkcipher_done(struct crypto4xx_device *dev, in crypto4xx_ablkcipher_done() argument
639 crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo, ablk_req->nbytes, in crypto4xx_ablkcipher_done()
643 addr = dma_map_page(dev->core_dev->device, sg_page(dst), in crypto4xx_ablkcipher_done()
646 crypto4xx_ret_sg_desc(dev, pd_uinfo); in crypto4xx_ablkcipher_done()
653 static u32 crypto4xx_ahash_done(struct crypto4xx_device *dev, in crypto4xx_ahash_done() argument
664 crypto4xx_ret_sg_desc(dev, pd_uinfo); in crypto4xx_ahash_done()
672 static u32 crypto4xx_pd_done(struct crypto4xx_device *dev, u32 idx) in crypto4xx_pd_done() argument
677 pd = dev->pdr + sizeof(struct ce_pd)*idx; in crypto4xx_pd_done()
678 pd_uinfo = dev->pdr_uinfo + sizeof(struct pd_uinfo)*idx; in crypto4xx_pd_done()
681 return crypto4xx_ablkcipher_done(dev, pd_uinfo, pd); in crypto4xx_pd_done()
683 return crypto4xx_ahash_done(dev, pd_uinfo); in crypto4xx_pd_done()
724 crypto4xx_destroy_pdr(core_dev->dev); in crypto4xx_stop_all()
725 crypto4xx_destroy_gdr(core_dev->dev); in crypto4xx_stop_all()
726 crypto4xx_destroy_sdr(core_dev->dev); in crypto4xx_stop_all()
727 iounmap(core_dev->dev->ce_base); in crypto4xx_stop_all()
728 kfree(core_dev->dev); in crypto4xx_stop_all()
732 void crypto4xx_return_pd(struct crypto4xx_device *dev, in crypto4xx_return_pd() argument
737 dev->pdr_head = pd_entry; in crypto4xx_return_pd()
766 struct crypto4xx_device *dev = ctx->dev; in crypto4xx_build_pd() local
806 spin_lock_irqsave(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
808 fst_gd = crypto4xx_get_n_gd(dev, num_gd); in crypto4xx_build_pd()
810 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
815 fst_sd = crypto4xx_get_n_sd(dev, num_sd); in crypto4xx_build_pd()
818 dev->gdr_head = fst_gd; in crypto4xx_build_pd()
819 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
823 pd_entry = crypto4xx_get_pd_from_pdr_nolock(dev); in crypto4xx_build_pd()
826 dev->gdr_head = fst_gd; in crypto4xx_build_pd()
828 dev->sdr_head = fst_sd; in crypto4xx_build_pd()
829 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
832 spin_unlock_irqrestore(&dev->core_dev->lock, flags); in crypto4xx_build_pd()
834 pd_uinfo = (struct pd_uinfo *)(dev->pdr_uinfo + in crypto4xx_build_pd()
836 pd = crypto4xx_get_pdp(dev, &pd_dma, pd_entry); in crypto4xx_build_pd()
870 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx); in crypto4xx_build_pd()
879 addr = dma_map_page(dev->core_dev->device, sg_page(sg), in crypto4xx_build_pd()
889 gd = crypto4xx_get_gdp(dev, &gd_dma, gd_idx); in crypto4xx_build_pd()
893 pd->src = (u32)dma_map_page(dev->core_dev->device, sg_page(src), in crypto4xx_build_pd()
919 pd->dest = (u32)dma_map_page(dev->core_dev->device, in crypto4xx_build_pd()
931 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx); in crypto4xx_build_pd()
944 sd = crypto4xx_get_sdp(dev, &sd_dma, sd_idx); in crypto4xx_build_pd()
965 writel(1, dev->ce_base + CRYPTO4XX_INT_DESCR_RD); in crypto4xx_build_pd()
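crypto4xx_build_pd() takes dev->core_dev->lock once and reserves, in order, the gather descriptors, the scatter descriptors and one packet descriptor; if a later reservation fails it rolls the earlier ring heads back to the saved first indices before unlocking (lines 806-832). The control flow is modelled below in plain, runnable C; the reserve_*() stubs and the forced reserve_pd() failure are artificial, and the real driver performs all of this under the spinlock rather than lock-free.

#include <stdio.h>
#include <stdbool.h>

/*
 * Control-flow model of the reservation sequence in crypto4xx_build_pd().
 * The stubs stand in for crypto4xx_get_n_gd(), crypto4xx_get_n_sd() and
 * crypto4xx_get_pd_from_pdr_nolock(); reserve_pd() fails on purpose so the
 * rollback path is exercised.
 */
static unsigned int gdr_head, sdr_head;

static bool reserve_gd(int n) { gdr_head += n; return true; }
static bool reserve_sd(int n) { sdr_head += n; return true; }
static bool reserve_pd(void)  { return false; /* force the rollback */ }

static int build_pd_model(int num_gd, int num_sd)
{
	unsigned int fst_gd = gdr_head, fst_sd = sdr_head;

	if (num_gd && !reserve_gd(num_gd))
		return -1;
	if (num_sd && !reserve_sd(num_sd)) {
		gdr_head = fst_gd;		/* undo the gather reservation */
		return -1;
	}
	if (!reserve_pd()) {
		if (num_gd)
			gdr_head = fst_gd;	/* undo both earlier reservations */
		if (num_sd)
			sdr_head = fst_sd;
		return -1;
	}
	return 0;
}

int main(void)
{
	int rc = build_pd_model(3, 2);

	/* prints rc=-1 gdr_head=0 sdr_head=0: heads rolled back after failure */
	printf("rc=%d gdr_head=%u sdr_head=%u\n", rc, gdr_head, sdr_head);
	return 0;
}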
978 ctx->dev = amcc_alg->dev; in crypto4xx_alg_init()
1020 alg->dev = sec_dev; in crypto4xx_register_alg()
1063 struct device *dev = (struct device *)data; in crypto4xx_bh_tasklet_cb() local
1064 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_bh_tasklet_cb()
1069 while (core_dev->dev->pdr_head != core_dev->dev->pdr_tail) { in crypto4xx_bh_tasklet_cb()
1070 tail = core_dev->dev->pdr_tail; in crypto4xx_bh_tasklet_cb()
1071 pd_uinfo = core_dev->dev->pdr_uinfo + in crypto4xx_bh_tasklet_cb()
1073 pd = core_dev->dev->pdr + sizeof(struct ce_pd) * tail; in crypto4xx_bh_tasklet_cb()
1078 crypto4xx_pd_done(core_dev->dev, tail); in crypto4xx_bh_tasklet_cb()
1079 crypto4xx_put_pd_to_pdr(core_dev->dev, tail); in crypto4xx_bh_tasklet_cb()
1093 struct device *dev = (struct device *)data; in crypto4xx_ce_interrupt_handler() local
1094 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_ce_interrupt_handler()
1096 if (!core_dev->dev->ce_base) in crypto4xx_ce_interrupt_handler()
1100 core_dev->dev->ce_base + CRYPTO4XX_INT_CLR); in crypto4xx_ce_interrupt_handler()
1142 struct device *dev = &ofdev->dev; in crypto4xx_probe() local
1145 rc = of_address_to_resource(ofdev->dev.of_node, 0, &res); in crypto4xx_probe()
1175 dev_set_drvdata(dev, core_dev); in crypto4xx_probe()
1177 core_dev->dev = kzalloc(sizeof(struct crypto4xx_device), GFP_KERNEL); in crypto4xx_probe()
1178 if (!core_dev->dev) in crypto4xx_probe()
1181 core_dev->dev->core_dev = core_dev; in crypto4xx_probe()
1182 core_dev->device = dev; in crypto4xx_probe()
1184 INIT_LIST_HEAD(&core_dev->dev->alg_list); in crypto4xx_probe()
1185 rc = crypto4xx_build_pdr(core_dev->dev); in crypto4xx_probe()
1189 rc = crypto4xx_build_gdr(core_dev->dev); in crypto4xx_probe()
1193 rc = crypto4xx_build_sdr(core_dev->dev); in crypto4xx_probe()
1199 (unsigned long) dev); in crypto4xx_probe()
1202 core_dev->irq = irq_of_parse_and_map(ofdev->dev.of_node, 0); in crypto4xx_probe()
1204 core_dev->dev->name, dev); in crypto4xx_probe()
1208 core_dev->dev->ce_base = of_iomap(ofdev->dev.of_node, 0); in crypto4xx_probe()
1209 if (!core_dev->dev->ce_base) { in crypto4xx_probe()
1210 dev_err(dev, "failed to of_iomap\n"); in crypto4xx_probe()
1216 crypto4xx_hw_init(core_dev->dev); in crypto4xx_probe()
1219 rc = crypto4xx_register_alg(core_dev->dev, crypto4xx_alg, in crypto4xx_probe()
1227 iounmap(core_dev->dev->ce_base); in crypto4xx_probe()
1229 free_irq(core_dev->irq, dev); in crypto4xx_probe()
1233 crypto4xx_destroy_sdr(core_dev->dev); in crypto4xx_probe()
1235 crypto4xx_destroy_gdr(core_dev->dev); in crypto4xx_probe()
1237 crypto4xx_destroy_pdr(core_dev->dev); in crypto4xx_probe()
1239 kfree(core_dev->dev); in crypto4xx_probe()
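The crypto4xx_probe() references set things up in a fixed order (PDR, GDR, SDR, IRQ, register window) and the error path unwinds them in reverse, which is why the cleanup calls from line 1227 onwards mirror the build calls from line 1185 onwards. A generic, runnable sketch of that goto-ladder idiom; the *_sketch() helpers are stubs standing in for the crypto4xx_build_*/destroy_* calls and are not the driver's functions.

#include <stdio.h>

/* stubs standing in for crypto4xx_build_pdr/gdr/sdr and their destructors */
static int build_pdr_sketch(void) { return 0; }
static int build_gdr_sketch(void) { return 0; }
static int build_sdr_sketch(void) { return -1; /* force the error path */ }
static void destroy_pdr_sketch(void) { printf("destroy pdr\n"); }
static void destroy_gdr_sketch(void) { printf("destroy gdr\n"); }

/*
 * The goto ladder used by crypto4xx_probe(): a failing step jumps to a
 * label that tears down only what was built before it, in reverse order.
 */
static int probe_sketch(void)
{
	int rc;

	rc = build_pdr_sketch();
	if (rc)
		goto err_build_pdr;

	rc = build_gdr_sketch();
	if (rc)
		goto err_build_gdr;

	rc = build_sdr_sketch();
	if (rc)
		goto err_build_sdr;

	return 0;

err_build_sdr:
	destroy_gdr_sketch();
err_build_gdr:
	destroy_pdr_sketch();
err_build_pdr:
	return rc;
}

int main(void)
{
	printf("probe_sketch() = %d\n", probe_sketch());
	return 0;
}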
1248 struct device *dev = &ofdev->dev; in crypto4xx_remove() local
1249 struct crypto4xx_core_device *core_dev = dev_get_drvdata(dev); in crypto4xx_remove()
1251 free_irq(core_dev->irq, dev); in crypto4xx_remove()
1256 crypto4xx_unregister_alg(core_dev->dev); in crypto4xx_remove()