Lines Matching refs:ce_kern

266 tioce_alloc_map(struct tioce_kernel *ce_kern, int type, int port,  in tioce_alloc_map()  argument
284 ce_mmr = (struct tioce __iomem *)ce_kern->ce_common->ce_pcibus.bs_base; in tioce_alloc_map()
294 ate_shadow = ce_kern->ce_ate3240_shadow; in tioce_alloc_map()
296 pagesize = ce_kern->ce_ate3240_pagesize; in tioce_alloc_map()
303 ate_shadow = ce_kern->ce_ate40_shadow; in tioce_alloc_map()
316 ate_shadow = ce_kern->ce_ate3240_shadow; in tioce_alloc_map()
360 tioce_mmr_storei(ce_kern, &ate_reg[i + j], ate); in tioce_alloc_map()
372 list_add(&map->ce_dmamap_list, &ce_kern->ce_dmamap_list); in tioce_alloc_map()
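
All of the hits in this listing are against struct tioce_kernel *ce_kern, the kernel-private per-ASIC state of the TIO CE provider (reached from ce_common->ce_kernel_private, as the hits at lines 878 and 968 show). The tioce_alloc_map() hits above (lines 266-372) trace the provider's central bookkeeping pattern: each hardware ATE table has a kernel-resident shadow array in ce_kern (ce_ate3240_shadow, ce_ate40_shadow), every hardware write goes through the tioce_mmr_storei() wrapper so shadow and MMR stay in sync, and each completed map is linked onto ce_kern->ce_dmamap_list. A minimal sketch of that flow, assuming the driver's tioce definitions; the struct below is a simplified stand-in for the real struct tioce_dmamap, and make_ate() is a hypothetical helper standing in for the driver's ATE-encoding logic.

#include <linux/list.h>
#include <linux/types.h>

/* simplified stand-in; the real struct tioce_dmamap has more fields */
struct tioce_dmamap_sketch {
	struct list_head ce_dmamap_list;  /* link onto ce_kern's map list */
	int refcnt;                       /* users of this map */
	u64 ct_start;                     /* first coretalk address covered */
	u64 nbytes;                       /* bytes covered by the map */
	u64 pci_start;                    /* bus address the map hands out */
	u64 __iomem *ate_hw;              /* first hardware ATE written */
	u64 *ate_shadow;                  /* kernel copy of those ATEs */
	int ate_count;                    /* number of ATEs in the map */
};

/*
 * Program 'nates' consecutive ATEs for a new map and record the map on
 * ce_kern->ce_dmamap_list.  tioce_mmr_storei() is the driver's shadowed
 * MMR store (lines 360 and 372 above); make_ate() is hypothetical.
 */
static void
tioce_install_map_sketch(struct tioce_kernel *ce_kern,
			 struct tioce_dmamap_sketch *map, int nates,
			 u64 (*make_ate)(int))
{
	int i;

	for (i = 0; i < nates; i++) {
		u64 ate = make_ate(i);

		map->ate_shadow[i] = ate;                        /* kernel copy */
		tioce_mmr_storei(ce_kern, &map->ate_hw[i], ate); /* hardware */
	}
	map->ate_count = nates;
	list_add(&map->ce_dmamap_list, &ce_kern->ce_dmamap_list);
}

Keeping the shadow arrays authoritative lets the lookup, reserve, and unmap paths below run without ever reading the MMRs back.
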
390 struct tioce_kernel *ce_kern; in tioce_dma_d32() local
401 pcidev_to_tioce(pdev, &ce_mmr, &ce_kern, &port); in tioce_dma_d32()
403 if (ce_kern->ce_port[port].dirmap_refcnt == 0) { in tioce_dma_d32()
406 ce_kern->ce_port[port].dirmap_shadow = ct_upper; in tioce_dma_d32()
407 tioce_mmr_storei(ce_kern, &ce_mmr->ce_ure_dir_map[port], in tioce_dma_d32()
412 dma_ok = (ce_kern->ce_port[port].dirmap_shadow == ct_upper); in tioce_dma_d32()
415 ce_kern->ce_port[port].dirmap_refcnt++; in tioce_dma_d32()
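
The tioce_dma_d32() hits (lines 390-415) outline a refcount-guarded 32-bit direct-map window per port: the first mapper programs ce_ure_dir_map[port] with the upper address bits of the target and caches them in dirmap_shadow, later mappings succeed only if their upper bits match the cached window, and every success bumps dirmap_refcnt. A sketch of just that guard, assuming the caller has already split the target address into its upper bits (ct_upper) and holds whatever locking the real path requires:

/*
 * Claim the per-port d32 direct-map window for ct_upper.  Returns
 * nonzero when the window covers the address (and takes a reference).
 */
static int
tioce_dirmap_claim_sketch(struct tioce_kernel *ce_kern,
			  struct tioce __iomem *ce_mmr, int port,
			  u64 ct_upper)
{
	int dma_ok;

	if (ce_kern->ce_port[port].dirmap_refcnt == 0) {
		/* first user: program the window and remember it */
		ce_kern->ce_port[port].dirmap_shadow = ct_upper;
		tioce_mmr_storei(ce_kern, &ce_mmr->ce_ure_dir_map[port],
				 ct_upper);
	}

	/* the window is shared, so every user must match it exactly */
	dma_ok = (ce_kern->ce_port[port].dirmap_shadow == ct_upper);
	if (dma_ok)
		ce_kern->ce_port[port].dirmap_refcnt++;

	return dma_ok;
}
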
463 struct tioce_kernel *ce_kern; in tioce_dma_unmap() local
468 pcidev_to_tioce(pdev, &ce_mmr, &ce_kern, &port); in tioce_dma_unmap()
475 spin_lock_irqsave(&ce_kern->ce_lock, flags); in tioce_dma_unmap()
478 if (--ce_kern->ce_port[port].dirmap_refcnt == 0) { in tioce_dma_unmap()
479 ce_kern->ce_port[port].dirmap_shadow = 0; in tioce_dma_unmap()
480 tioce_mmr_storei(ce_kern, &ce_mmr->ce_ure_dir_map[port], in tioce_dma_unmap()
486 list_for_each_entry(map, &ce_kern->ce_dmamap_list, in tioce_dma_unmap()
495 if (&map->ce_dmamap_list == &ce_kern->ce_dmamap_list) { in tioce_dma_unmap()
502 tioce_mmr_storei(ce_kern, &map->ate_hw[i], 0); in tioce_dma_unmap()
510 spin_unlock_irqrestore(&ce_kern->ce_lock, flags); in tioce_dma_unmap()
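
tioce_dma_unmap() (lines 463-510) is the mirror image, and everything happens under ce_kern->ce_lock (lines 475/510): a d32 address drops a direct-map reference and clears dirmap_shadow plus the ce_ure_dir_map register when the count reaches zero (lines 478-480); otherwise the function walks ce_dmamap_list for the ATE map containing the address (the line-495 comparison catches a walk that found nothing) and, once that map's refcount drops to zero, invalidates its hardware ATEs (line 502). A sketch of the ATE-map half, reusing the stand-in struct from the first sketch (plus <linux/spinlock.h> and <linux/slab.h> for the final kfree()):

/*
 * Drop one reference on an ATE map and tear it down when unused.
 */
static void
tioce_release_map_sketch(struct tioce_kernel *ce_kern,
			 struct tioce_dmamap_sketch *map)
{
	unsigned long flags;
	int i;

	spin_lock_irqsave(&ce_kern->ce_lock, flags);

	if (--map->refcnt == 0) {
		/* last user: zero the hardware ATEs (line 502) */
		for (i = 0; i < map->ate_count; i++)
			tioce_mmr_storei(ce_kern, &map->ate_hw[i], 0);

		list_del(&map->ce_dmamap_list);
		kfree(map);
	}

	spin_unlock_irqrestore(&ce_kern->ce_lock, flags);
}
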
529 struct tioce_kernel *ce_kern; in tioce_do_dma_map() local
554 pcidev_to_tioce(pdev, NULL, &ce_kern, &port); in tioce_do_dma_map()
556 spin_lock_irqsave(&ce_kern->ce_lock, flags); in tioce_do_dma_map()
564 list_for_each_entry(map, &ce_kern->ce_dmamap_list, ce_dmamap_list) { in tioce_do_dma_map()
593 mapaddr = tioce_alloc_map(ce_kern, TIOCE_ATE_M40S, in tioce_do_dma_map()
598 tioce_alloc_map(ce_kern, TIOCE_ATE_M40, -1, in tioce_do_dma_map()
602 mapaddr = tioce_alloc_map(ce_kern, TIOCE_ATE_M40, -1, in tioce_do_dma_map()
607 tioce_alloc_map(ce_kern, TIOCE_ATE_M40S, in tioce_do_dma_map()
624 tioce_alloc_map(ce_kern, TIOCE_ATE_M32, -1, ct_addr, in tioce_do_dma_map()
627 spin_unlock_irqrestore(&ce_kern->ce_lock, flags); in tioce_do_dma_map()
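
tioce_do_dma_map() (lines 529-627) ties the pieces together. Under ce_lock it first searches ce_dmamap_list for an existing map already covering the coretalk address (line 564) and only then calls tioce_alloc_map(), trying the 40-bit flavors (TIOCE_ATE_M40S, the per-port superpage variant, and TIOCE_ATE_M40) before resorting to a 32-bit TIOCE_ATE_M32 map (line 624). A sketch of that order follows; the tioce_alloc_map() prototype is abbreviated to the arguments visible in the truncated hits, and map_covers() is a hypothetical predicate standing in for the real extent check. Two simplifications to note: the hits at lines 593-607 show the two 40-bit attempts in both orders because the real function picks the order based on the mapping's size, and the real function also tries the d32 direct window (previous sketch) before falling back to an M32 ATE map.

/* abbreviated prototype; the real function (line 266) takes further
 * trailing arguments that are truncated in the hits above */
u64 tioce_alloc_map(struct tioce_kernel *ce_kern, int type, int port,
		    u64 ct_addr, u64 byte_count);

/* hypothetical coverage test standing in for the real extent check */
static bool
map_covers(struct tioce_dmamap_sketch *map, u64 ct_addr, u64 nbytes)
{
	return ct_addr >= map->ct_start &&
	       ct_addr + nbytes <= map->ct_start + map->nbytes;
}

/*
 * Find or build a DMA map for [ct_addr, ct_addr + byte_count).
 */
static u64
tioce_map_fallback_sketch(struct tioce_kernel *ce_kern, int port,
			  u64 ct_addr, u64 byte_count)
{
	struct tioce_dmamap_sketch *map;
	unsigned long flags;
	u64 mapaddr = 0;

	spin_lock_irqsave(&ce_kern->ce_lock, flags);

	/* reuse an existing map that already covers the range */
	list_for_each_entry(map, &ce_kern->ce_dmamap_list, ce_dmamap_list) {
		if (map_covers(map, ct_addr, byte_count)) {
			map->refcnt++;
			mapaddr = map->pci_start + (ct_addr - map->ct_start);
			break;
		}
	}

	/* otherwise allocate, preferring the 40-bit map flavors */
	if (!mapaddr)
		mapaddr = tioce_alloc_map(ce_kern, TIOCE_ATE_M40S, port,
					  ct_addr, byte_count);
	if (!mapaddr)
		mapaddr = tioce_alloc_map(ce_kern, TIOCE_ATE_M40, -1,
					  ct_addr, byte_count);
	if (!mapaddr)
		mapaddr = tioce_alloc_map(ce_kern, TIOCE_ATE_M32, -1,
					  ct_addr, byte_count);

	spin_unlock_irqrestore(&ce_kern->ce_lock, flags);
	return mapaddr;
}
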
702 tioce_reserve_m32(struct tioce_kernel *ce_kern, u64 base, u64 limit) in tioce_reserve_m32() argument
707 ce_mmr = (struct tioce __iomem *)ce_kern->ce_common->ce_pcibus.bs_base; in tioce_reserve_m32()
708 ps = ce_kern->ce_ate3240_pagesize; in tioce_reserve_m32()
722 ce_kern->ce_ate3240_shadow[ate_index] = ate; in tioce_reserve_m32()
723 tioce_mmr_storei(ce_kern, &ce_mmr->ce_ure_ate3240[ate_index], in tioce_reserve_m32()
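
tioce_reserve_m32() (lines 702-723) marks a range of the 32-bit (3240) ATE table as off-limits: it converts [base, limit] into ATE indexes using the table's pagesize (ce_ate3240_pagesize) and writes a placeholder entry into both the shadow array and ce_ure_ate3240[], so the map allocator never hands those pages out. A sketch, with ate_page() and make_reserved_ate() as hypothetical stand-ins for the driver's index and encode macros:

/*
 * Reserve the 3240-table ATEs covering [base, limit] so they are
 * never allocated for DMA.
 */
static void
tioce_reserve_m32_sketch(struct tioce_kernel *ce_kern, u64 base, u64 limit)
{
	struct tioce __iomem *ce_mmr;
	int ate_index, last_ate, ps;

	ce_mmr = (struct tioce __iomem *)ce_kern->ce_common->ce_pcibus.bs_base;
	ps = ce_kern->ce_ate3240_pagesize;

	for (ate_index = ate_page(base, ps), last_ate = ate_page(limit, ps);
	     ate_index <= last_ate; ate_index++) {
		u64 ate = make_reserved_ate(ps);  /* hypothetical encoder */

		ce_kern->ce_ate3240_shadow[ate_index] = ate;      /* line 722 */
		tioce_mmr_storei(ce_kern, &ce_mmr->ce_ure_ate3240[ate_index],
				 ate);                            /* line 723 */
	}
}
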
862 struct tioce_kernel *ce_kern; in tioce_force_interrupt() local
878 ce_kern = (struct tioce_kernel *)ce_common->ce_kernel_private; in tioce_force_interrupt()
888 tioce_mmr_load(ce_kern, &ce_mmr->ce_adm_int_status, &status); in tioce_force_interrupt()
938 tioce_mmr_storei(ce_kern, &ce_mmr->ce_adm_force_int, force_int_val); in tioce_force_interrupt()
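
tioce_force_interrupt() (lines 862-938) recovers ce_kern from the common structure's ce_kernel_private pointer (line 878), reads the admin interrupt status through the tioce_mmr_load() wrapper (line 888), and pokes ce_adm_force_int to replay the interrupt (line 938). A sketch of that load-then-store shape only: the real function derives the status bit and force-interrupt value from the interrupt source, and its decision logic is more involved, so both are taken as parameters here and the pending check is a simplification.

/*
 * Replay an interrupt whose source still shows pending.  status_bit
 * and force_int_val are computed by the real function; here they are
 * arguments.
 */
static void
tioce_force_int_sketch(struct tioce_common *ce_common,
		       struct tioce __iomem *ce_mmr,
		       u64 status_bit, u64 force_int_val)
{
	struct tioce_kernel *ce_kern;
	u64 status;

	ce_kern = (struct tioce_kernel *)ce_common->ce_kernel_private;

	tioce_mmr_load(ce_kern, &ce_mmr->ce_adm_int_status, &status);
	if (status & status_bit)
		tioce_mmr_storei(ce_kern, &ce_mmr->ce_adm_force_int,
				 force_int_val);
}
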
957 struct tioce_kernel *ce_kern; in tioce_target_interrupt() local
968 ce_kern = (struct tioce_kernel *)ce_common->ce_kernel_private; in tioce_target_interrupt()
972 tioce_mmr_seti(ce_kern, &ce_mmr->ce_adm_int_mask, (1UL << bit)); in tioce_target_interrupt()
975 tioce_mmr_storei(ce_kern, &ce_mmr->ce_adm_int_dest[bit], vector); in tioce_target_interrupt()
976 tioce_mmr_clri(ce_kern, &ce_mmr->ce_adm_int_mask, (1UL << bit)); in tioce_target_interrupt()
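
tioce_target_interrupt() (lines 957-976) retargets an interrupt with a short mask/modify/unmask sequence: set the interrupt's bit in ce_adm_int_mask, rewrite its ce_adm_int_dest[] entry with the new destination vector, then clear the mask bit, all through the shadowed-MMR helpers. The sequence, essentially as the hits show it (the real function computes 'vector' from the target node and CPU):

/*
 * Retarget interrupt 'bit' to 'vector'.  Masking around the dest
 * write keeps the interrupt from firing mid-update.
 */
static void
tioce_retarget_sketch(struct tioce_kernel *ce_kern,
		      struct tioce __iomem *ce_mmr, int bit, u64 vector)
{
	tioce_mmr_seti(ce_kern, &ce_mmr->ce_adm_int_mask, 1UL << bit);    /* 972 */
	tioce_mmr_storei(ce_kern, &ce_mmr->ce_adm_int_dest[bit], vector); /* 975 */
	tioce_mmr_clri(ce_kern, &ce_mmr->ce_adm_int_mask, 1UL << bit);    /* 976 */
}
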