Lines matching refs: cd
54 int __genwqe_writeq(struct genwqe_dev *cd, u64 byte_offs, u64 val) in __genwqe_writeq() argument
56 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writeq()
58 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writeq()
61 if (cd->mmio == NULL) in __genwqe_writeq()
67 __raw_writeq((__force u64)cpu_to_be64(val), cd->mmio + byte_offs); in __genwqe_writeq()
78 u64 __genwqe_readq(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readq() argument
80 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readq()
83 if ((cd->err_inject & GENWQE_INJECT_GFIR_FATAL) && in __genwqe_readq()
87 if ((cd->err_inject & GENWQE_INJECT_GFIR_INFO) && in __genwqe_readq()
91 if (cd->mmio == NULL) in __genwqe_readq()
94 return be64_to_cpu((__force __be64)__raw_readq(cd->mmio + byte_offs)); in __genwqe_readq()
105 int __genwqe_writel(struct genwqe_dev *cd, u64 byte_offs, u32 val) in __genwqe_writel() argument
107 struct pci_dev *pci_dev = cd->pci_dev; in __genwqe_writel()
109 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_writel()
112 if (cd->mmio == NULL) in __genwqe_writel()
118 __raw_writel((__force u32)cpu_to_be32(val), cd->mmio + byte_offs); in __genwqe_writel()
129 u32 __genwqe_readl(struct genwqe_dev *cd, u64 byte_offs) in __genwqe_readl() argument
131 if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE) in __genwqe_readl()
134 if (cd->mmio == NULL) in __genwqe_readl()
137 return be32_to_cpu((__force __be32)__raw_readl(cd->mmio + byte_offs)); in __genwqe_readl()
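
The four accessors above share one pattern: honor the error-injection flags, bail out if the BAR is not mapped, and otherwise do a raw MMIO access with an explicit big-endian conversion, since the card's registers are big-endian while __raw_writeq()/__raw_readq() skip the implicit byte swap. A minimal sketch of the 64-bit pair, assuming the driver's internal header plus <linux/io.h> are in scope; the -EIO and all-ones error values are assumptions, and the pci_dev local and the GFIR error-injection branches of __genwqe_readq() visible above are omitted. The 32-bit pair works the same way with cpu_to_be32()/be32_to_cpu().

int __genwqe_writeq(struct genwqe_dev *cd, u64 byte_offs, u64 val)
{
        if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE)
                return -EIO;                    /* simulate a dead card */

        if (cd->mmio == NULL)                   /* BAR not (yet) mapped */
                return -EIO;

        /* registers are big-endian: convert explicitly, then raw write */
        __raw_writeq((__force u64)cpu_to_be64(val), cd->mmio + byte_offs);
        return 0;
}

u64 __genwqe_readq(struct genwqe_dev *cd, u64 byte_offs)
{
        if (cd->err_inject & GENWQE_INJECT_HARDWARE_FAILURE)
                return 0xffffffffffffffffull;   /* all-ones, like a dead card */

        if (cd->mmio == NULL)
                return 0xffffffffffffffffull;

        return be64_to_cpu((__force __be64)__raw_readq(cd->mmio + byte_offs));
}
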
145 int genwqe_read_app_id(struct genwqe_dev *cd, char *app_name, int len) in genwqe_read_app_id() argument
148 u32 app_id = (u32)cd->app_unitcfg; in genwqe_read_app_id()
217 void *__genwqe_alloc_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_alloc_consistent() argument
223 return dma_alloc_coherent(&cd->pci_dev->dev, size, dma_handle, in __genwqe_alloc_consistent()
227 void __genwqe_free_consistent(struct genwqe_dev *cd, size_t size, in __genwqe_free_consistent() argument
233 dma_free_coherent(&cd->pci_dev->dev, size, vaddr, dma_handle); in __genwqe_free_consistent()
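
Both allocator helpers above are thin wrappers around the generic coherent-DMA API, keyed off the card's PCI device. A hedged sketch of how they line up with the dma_alloc_coherent()/dma_free_coherent() calls shown in the listing; the GFP_KERNEL flag and the NULL guard on free are assumptions, everything else is visible above.

void *__genwqe_alloc_consistent(struct genwqe_dev *cd, size_t size,
                                dma_addr_t *dma_handle)
{
        /* one coherent buffer, mapped for the card behind cd->pci_dev */
        return dma_alloc_coherent(&cd->pci_dev->dev, size, dma_handle,
                                  GFP_KERNEL);
}

void __genwqe_free_consistent(struct genwqe_dev *cd, size_t size,
                              void *vaddr, dma_addr_t dma_handle)
{
        if (vaddr == NULL)                      /* assumption: tolerate NULL */
                return;

        dma_free_coherent(&cd->pci_dev->dev, size, vaddr, dma_handle);
}
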
236 static void genwqe_unmap_pages(struct genwqe_dev *cd, dma_addr_t *dma_list, in genwqe_unmap_pages() argument
240 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_unmap_pages()
249 static int genwqe_map_pages(struct genwqe_dev *cd, in genwqe_map_pages() argument
254 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_map_pages()
278 genwqe_unmap_pages(cd, dma_list, num_pages); in genwqe_map_pages()
298 int genwqe_alloc_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_alloc_sync_sgl() argument
302 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_alloc_sync_sgl()
323 sgl->sgl = __genwqe_alloc_consistent(cd, sgl->sgl_size, in genwqe_alloc_sync_sgl()
333 sgl->fpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
346 sgl->lpage = __genwqe_alloc_consistent(cd, PAGE_SIZE, in genwqe_alloc_sync_sgl()
361 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_alloc_sync_sgl()
364 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_alloc_sync_sgl()
369 int genwqe_setup_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl, in genwqe_setup_sgl() argument
466 int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl) in genwqe_free_sync_sgl() argument
469 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_free_sync_sgl()
478 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage, in genwqe_free_sync_sgl()
491 __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage, in genwqe_free_sync_sgl()
496 __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl, in genwqe_free_sync_sgl()
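
genwqe_alloc_sync_sgl() carves three pieces out of coherent memory: the SGL descriptor array itself (sgl->sgl, sgl->sgl_size bytes) plus one bounce page each for an unaligned head (fpage) and tail (lpage); genwqe_free_sync_sgl() returns all three. A hedged reconstruction of the free path from the fragments above; the *_dma_addr field names are not visible in the listing and are assumptions, and any copy-back of the bounce pages to the user buffer is omitted.

int genwqe_free_sync_sgl(struct genwqe_dev *cd, struct genwqe_sgl *sgl)
{
        if (sgl->fpage) {                       /* bounce page for unaligned head */
                __genwqe_free_consistent(cd, PAGE_SIZE, sgl->fpage,
                                         sgl->fpage_dma_addr);  /* field name assumed */
                sgl->fpage = NULL;
        }
        if (sgl->lpage) {                       /* bounce page for unaligned tail */
                __genwqe_free_consistent(cd, PAGE_SIZE, sgl->lpage,
                                         sgl->lpage_dma_addr);  /* field name assumed */
                sgl->lpage = NULL;
        }
        if (sgl->sgl) {                         /* the descriptor array itself */
                __genwqe_free_consistent(cd, sgl->sgl_size, sgl->sgl,
                                         sgl->sgl_dma_addr);    /* field name assumed */
                sgl->sgl = NULL;
        }
        return 0;
}
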
558 int genwqe_user_vmap(struct genwqe_dev *cd, struct dma_mapping *m, void *uaddr, in genwqe_user_vmap() argument
563 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vmap()
604 rc = genwqe_map_pages(cd, m->page_list, m->nr_pages, m->dma_list); in genwqe_user_vmap()
629 int genwqe_user_vunmap(struct genwqe_dev *cd, struct dma_mapping *m, in genwqe_user_vunmap() argument
632 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_user_vunmap()
641 genwqe_unmap_pages(cd, m->dma_list, m->nr_pages); in genwqe_user_vunmap()
665 u8 genwqe_card_type(struct genwqe_dev *cd) in genwqe_card_type() argument
667 u64 card_type = cd->slu_unitcfg; in genwqe_card_type()
676 int genwqe_card_reset(struct genwqe_dev *cd) in genwqe_card_reset() argument
679 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_card_reset()
681 if (!genwqe_is_privileged(cd)) in genwqe_card_reset()
685 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull); in genwqe_card_reset()
687 __genwqe_readq(cd, IO_HSU_FIR_CLR); in genwqe_card_reset()
688 __genwqe_readq(cd, IO_APP_FIR_CLR); in genwqe_card_reset()
689 __genwqe_readq(cd, IO_SLU_FIR_CLR); in genwqe_card_reset()
699 softrst = __genwqe_readq(cd, IO_SLC_CFGREG_SOFTRESET) & 0x3cull; in genwqe_card_reset()
700 __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, softrst | 0x2ull); in genwqe_card_reset()
705 if (genwqe_need_err_masking(cd)) { in genwqe_card_reset()
708 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_card_reset()
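
The reset sequence visible above is: start the soft reset, read the three *_FIR_CLR registers, then re-arm the card with a read-modify-write of IO_SLC_CFGREG_SOFTRESET that keeps bits 0x3c and sets 0x2, masking known-noisy errors afterwards on bitstreams that need it. A hedged sketch; the -EPERM return, the settle delay and the comments are assumptions, the register accesses and masks are taken from the listing.

int genwqe_card_reset(struct genwqe_dev *cd)
{
        u64 softrst;

        if (!genwqe_is_privileged(cd))
                return -EPERM;                  /* assumption: PF-only operation */

        /* kick off the soft reset */
        __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, 0x1ull);
        msleep(1000);                           /* assumption: give the card time to settle */

        /* touch the FIR clear registers (presumably acknowledging old errors) */
        __genwqe_readq(cd, IO_HSU_FIR_CLR);
        __genwqe_readq(cd, IO_APP_FIR_CLR);
        __genwqe_readq(cd, IO_SLU_FIR_CLR);

        /* read-modify-write: keep bits 0x3c, set the re-arm bit 0x2 */
        softrst = __genwqe_readq(cd, IO_SLC_CFGREG_SOFTRESET) & 0x3cull;
        __genwqe_writeq(cd, IO_SLC_CFGREG_SOFTRESET, softrst | 0x2ull);

        /* some bitstreams only run reliably with these errors masked */
        if (genwqe_need_err_masking(cd))
                __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull);

        return 0;
}
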
713 int genwqe_read_softreset(struct genwqe_dev *cd) in genwqe_read_softreset() argument
717 if (!genwqe_is_privileged(cd)) in genwqe_read_softreset()
720 bitstream = __genwqe_readq(cd, IO_SLU_BITSTREAM) & 0x1; in genwqe_read_softreset()
721 cd->softreset = (bitstream == 0) ? 0x8ull : 0xcull; in genwqe_read_softreset()
730 int genwqe_set_interrupt_capability(struct genwqe_dev *cd, int count) in genwqe_set_interrupt_capability() argument
733 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_set_interrupt_capability()
739 cd->flags |= GENWQE_FLAG_MSI_ENABLED; in genwqe_set_interrupt_capability()
747 void genwqe_reset_interrupt_capability(struct genwqe_dev *cd) in genwqe_reset_interrupt_capability() argument
749 struct pci_dev *pci_dev = cd->pci_dev; in genwqe_reset_interrupt_capability()
751 if (cd->flags & GENWQE_FLAG_MSI_ENABLED) { in genwqe_reset_interrupt_capability()
753 cd->flags &= ~GENWQE_FLAG_MSI_ENABLED; in genwqe_reset_interrupt_capability()
767 static int set_reg_idx(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg_idx() argument
781 static int set_reg(struct genwqe_dev *cd, struct genwqe_reg *r, in set_reg() argument
784 return set_reg_idx(cd, r, i, m, addr, 0, val); in set_reg()
787 int genwqe_read_ffdc_regs(struct genwqe_dev *cd, struct genwqe_reg *regs, in genwqe_read_ffdc_regs() argument
795 gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR); in genwqe_read_ffdc_regs()
796 set_reg(cd, regs, &idx, max_regs, IO_SLC_CFGREG_GFIR, gfir); in genwqe_read_ffdc_regs()
799 sluid = __genwqe_readq(cd, IO_SLU_UNITCFG); /* 0x00000000 */ in genwqe_read_ffdc_regs()
800 set_reg(cd, regs, &idx, max_regs, IO_SLU_UNITCFG, sluid); in genwqe_read_ffdc_regs()
803 appid = __genwqe_readq(cd, IO_APP_UNITCFG); /* 0x02000000 */ in genwqe_read_ffdc_regs()
804 set_reg(cd, regs, &idx, max_regs, IO_APP_UNITCFG, appid); in genwqe_read_ffdc_regs()
811 ufir = __genwqe_readq(cd, ufir_addr); in genwqe_read_ffdc_regs()
812 set_reg(cd, regs, &idx, max_regs, ufir_addr, ufir); in genwqe_read_ffdc_regs()
816 ufec = __genwqe_readq(cd, ufec_addr); in genwqe_read_ffdc_regs()
817 set_reg(cd, regs, &idx, max_regs, ufec_addr, ufec); in genwqe_read_ffdc_regs()
825 sfir = __genwqe_readq(cd, sfir_addr); in genwqe_read_ffdc_regs()
826 set_reg(cd, regs, &idx, max_regs, sfir_addr, sfir); in genwqe_read_ffdc_regs()
829 sfec = __genwqe_readq(cd, sfec_addr); in genwqe_read_ffdc_regs()
830 set_reg(cd, regs, &idx, max_regs, sfec_addr, sfec); in genwqe_read_ffdc_regs()
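
The FFDC capture above starts by snapshotting the global FIR plus the SLU/APP unit configuration registers into the regs array via set_reg(), before walking the per-unit FIR/FEC registers. A hedged sketch of just that opening, written as a standalone helper sitting next to the static set_reg() above; the helper name, the unsigned int types and the return convention are assumptions, the three register reads are taken from the listing, and the per-unit ufir/ufec/sfir/sfec loops are omitted.

static int example_ffdc_snapshot(struct genwqe_dev *cd, struct genwqe_reg *regs,
                                 unsigned int max_regs)
{
        unsigned int idx = 0;
        u64 gfir, sluid, appid;

        gfir = __genwqe_readq(cd, IO_SLC_CFGREG_GFIR);          /* global FIR */
        set_reg(cd, regs, &idx, max_regs, IO_SLC_CFGREG_GFIR, gfir);

        sluid = __genwqe_readq(cd, IO_SLU_UNITCFG);             /* SLU unit config */
        set_reg(cd, regs, &idx, max_regs, IO_SLU_UNITCFG, sluid);

        appid = __genwqe_readq(cd, IO_APP_UNITCFG);             /* APP unit config */
        set_reg(cd, regs, &idx, max_regs, IO_APP_UNITCFG, appid);

        return idx;                             /* number of entries captured */
}
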
845 int genwqe_ffdc_buff_size(struct genwqe_dev *cd, int uid) in genwqe_ffdc_buff_size() argument
852 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_size()
858 val = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_size()
881 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_size()
898 int genwqe_ffdc_buff_read(struct genwqe_dev *cd, int uid, in genwqe_ffdc_buff_read() argument
907 eevptr = __genwqe_readq(cd, eevptr_addr); in genwqe_ffdc_buff_read()
912 e = __genwqe_readq(cd, l_addr); in genwqe_ffdc_buff_read()
923 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
924 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
930 val = __genwqe_readq(cd, d_addr); in genwqe_ffdc_buff_read()
931 set_reg_idx(cd, regs, &idx, max_regs, in genwqe_ffdc_buff_read()
946 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
964 __genwqe_writeq(cd, addr, diag_sel); in genwqe_ffdc_buff_read()
971 val = __genwqe_readq(cd, addr); in genwqe_ffdc_buff_read()
972 set_reg_idx(cd, regs, &idx, max_regs, addr, in genwqe_ffdc_buff_read()
986 int genwqe_write_vreg(struct genwqe_dev *cd, u32 reg, u64 val, int func) in genwqe_write_vreg() argument
988 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_write_vreg()
989 __genwqe_writeq(cd, reg, val); in genwqe_write_vreg()
999 u64 genwqe_read_vreg(struct genwqe_dev *cd, u32 reg, int func) in genwqe_read_vreg() argument
1001 __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf); in genwqe_read_vreg()
1002 return __genwqe_readq(cd, reg); in genwqe_read_vreg()
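
genwqe_write_vreg()/genwqe_read_vreg() implement register virtualization for the virtual functions: the PF first writes the target function number into IO_PF_SLC_VIRTUAL_WINDOW, then accesses the virtualized register through the normal MMIO helpers. The bodies below are essentially what the fragments show; only the write helper's return value and the implicit requirement that callers serialize the window-select/access pair are assumptions.

int genwqe_write_vreg(struct genwqe_dev *cd, u32 reg, u64 val, int func)
{
        /* select which PCI function's view of 'reg' we are touching */
        __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf);
        __genwqe_writeq(cd, reg, val);
        return 0;
}

u64 genwqe_read_vreg(struct genwqe_dev *cd, u32 reg, int func)
{
        __genwqe_writeq(cd, IO_PF_SLC_VIRTUAL_WINDOW, func & 0xf);
        return __genwqe_readq(cd, reg);
}
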
1017 int genwqe_base_clock_frequency(struct genwqe_dev *cd) in genwqe_base_clock_frequency() argument
1022 speed = (u16)((cd->slu_unitcfg >> 28) & 0x0full); in genwqe_base_clock_frequency()
1034 void genwqe_stop_traps(struct genwqe_dev *cd) in genwqe_stop_traps() argument
1036 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_SET, 0xcull); in genwqe_stop_traps()
1044 void genwqe_start_traps(struct genwqe_dev *cd) in genwqe_start_traps() argument
1046 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG_CLR, 0xcull); in genwqe_start_traps()
1048 if (genwqe_need_err_masking(cd)) in genwqe_start_traps()
1049 __genwqe_writeq(cd, IO_SLC_MISC_DEBUG, 0x0aull); in genwqe_start_traps()
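
genwqe_stop_traps() sets debug bits 0xc via IO_SLC_MISC_DEBUG_SET and genwqe_start_traps() clears the same bits via IO_SLC_MISC_DEBUG_CLR (re-applying the 0x0a error mask where needed), which suggests they are meant to bracket debug-register walks such as the FFDC reads above. A hedged usage sketch under exactly that assumption; only genwqe_ffdc_buff_size()'s signature is visible in the listing, the helper name and uid handling are illustrative.

static int example_ffdc_probe(struct genwqe_dev *cd, int uid)
{
        int entries;

        genwqe_stop_traps(cd);          /* freeze trap capture while we poke around */
        entries = genwqe_ffdc_buff_size(cd, uid);
        genwqe_start_traps(cd);         /* resume normal trap capture */

        return entries;                 /* number of FFDC entries for this unit */
}
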