Lines matching refs: vsg — every source line that references the DMA blit scatter/gather state object vsg (a drm_via_sg_info_t), shown with its source line number and enclosing function.

64 via_unmap_blit_from_device(struct pci_dev *pdev, drm_via_sg_info_t *vsg)  in via_unmap_blit_from_device()  argument
66 int num_desc = vsg->num_desc; in via_unmap_blit_from_device()
67 unsigned cur_descriptor_page = num_desc / vsg->descriptors_per_page; in via_unmap_blit_from_device()
68 unsigned descriptor_this_page = num_desc % vsg->descriptors_per_page; in via_unmap_blit_from_device()
69 drm_via_descriptor_t *desc_ptr = vsg->desc_pages[cur_descriptor_page] + in via_unmap_blit_from_device()
71 dma_addr_t next = vsg->chain_start; in via_unmap_blit_from_device()
76 descriptor_this_page = vsg->descriptors_per_page - 1; in via_unmap_blit_from_device()
77 desc_ptr = vsg->desc_pages[cur_descriptor_page] + in via_unmap_blit_from_device()
81 dma_unmap_page(&pdev->dev, desc_ptr->mem_addr, desc_ptr->size, vsg->direction); in via_unmap_blit_from_device()
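Taken together, these matches outline the teardown path for a completed blit: starting from the descriptor slot computed from num_desc, the function walks the chain backwards, crossing descriptor-page boundaries as needed, and unmaps every user page that via_map_blit_for_device() mapped. The sketch below fills in the loop around the matched lines; everything not shown above (the loop itself, the unmapping of the streaming-mapped descriptors via next, the pointer stepping) is reconstructed and should be read as an assumption about this revision, not verbatim source.

static void
via_unmap_blit_from_device(struct pci_dev *pdev, drm_via_sg_info_t *vsg)
{
	int num_desc = vsg->num_desc;
	unsigned cur_descriptor_page = num_desc / vsg->descriptors_per_page;
	unsigned descriptor_this_page = num_desc % vsg->descriptors_per_page;
	drm_via_descriptor_t *desc_ptr = vsg->desc_pages[cur_descriptor_page] +
		descriptor_this_page;
	dma_addr_t next = vsg->chain_start;

	/* Walk the descriptor chain backwards, from the last descriptor
	 * written by via_map_blit_for_device() down to the first. */
	while (num_desc--) {
		if (descriptor_this_page-- == 0) {
			/* Crossed a page boundary: continue at the last slot
			 * of the previous descriptor page. */
			cur_descriptor_page--;
			descriptor_this_page = vsg->descriptors_per_page - 1;
			desc_ptr = vsg->desc_pages[cur_descriptor_page] +
				descriptor_this_page;
		} else {
			desc_ptr--;
		}
		/* Unmap the descriptor itself (its bus address travels through
		 * the chain links, hence "next"), then the user page it
		 * described.  The dma_unmap_single() call is inferred. */
		dma_unmap_single(&pdev->dev, next, sizeof(*desc_ptr), DMA_TO_DEVICE);
		dma_unmap_page(&pdev->dev, desc_ptr->mem_addr, desc_ptr->size,
			       vsg->direction);
		next = (dma_addr_t) desc_ptr->next;
	}
}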
97 drm_via_sg_info_t *vsg, in via_map_blit_for_device() argument
115 desc_ptr = vsg->desc_pages[cur_descriptor_page]; in via_map_blit_for_device()
131 vsg->pages[VIA_PFN(cur_mem) - in via_map_blit_for_device()
134 vsg->direction); in via_map_blit_for_device()
142 if (++num_descriptors_this_page >= vsg->descriptors_per_page) { in via_map_blit_for_device()
144 desc_ptr = vsg->desc_pages[++cur_descriptor_page]; in via_map_blit_for_device()
158 vsg->chain_start = next; in via_map_blit_for_device()
159 vsg->state = dr_via_device_mapped; in via_map_blit_for_device()
161 vsg->num_desc = num_desc; in via_map_blit_for_device()
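These matches come from the two-pass mapping routine. Called with mode 0 (source line 663 below) it only counts how many descriptors the transfer needs; called with mode 1 (line 669) it dma_map_page()s each chunk of user memory, writes the hardware descriptors (the assignment at line 115 points desc_ptr at the first descriptor page before the loops, presumably only in mode 1), and finally records the chain start, the new state and the descriptor count. Below is a sketch of the per-chunk body and the function tail; the outer loops over lines and within-line chunks, and locals such as first_addr, cur_fb, cur_mem, remaining_len, next and mode, are not visible in the matches and are reconstructed assumptions.

	/* Inside the loop over scanlines and within-line, page-sized chunks
	 * (outer loops not shown).  remaining_len is the part of the current
	 * line that fits in the current user page. */
	if (mode == 1) {
		/* Map the user page backing this chunk and describe the chunk
		 * to the blit engine. */
		desc_ptr->mem_addr =
			dma_map_page(&pdev->dev,
				     vsg->pages[VIA_PFN(cur_mem) -
						VIA_PFN(first_addr)],
				     VIA_PGOFF(cur_mem), remaining_len,
				     vsg->direction);
		desc_ptr->dev_addr = cur_fb;		/* framebuffer side of the copy */
		desc_ptr->size = remaining_len;
		desc_ptr->next = (uint32_t) next;	/* link to the previously mapped descriptor */
		next = dma_map_single(&pdev->dev, desc_ptr, sizeof(*desc_ptr),
				      DMA_TO_DEVICE);
		desc_ptr++;
		if (++num_descriptors_this_page >= vsg->descriptors_per_page) {
			num_descriptors_this_page = 0;
			desc_ptr = vsg->desc_pages[++cur_descriptor_page];
		}
	}
	num_desc++;

	/* After both loops have finished: */
	if (mode == 1) {
		vsg->chain_start = next;
		vsg->state = dr_via_device_mapped;
	}
	vsg->num_desc = num_desc;

Running the same walk twice, first in counting mode, means via_alloc_desc_pages() never has to guess how many descriptors the transfer will need.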
172 via_free_sg_info(struct pci_dev *pdev, drm_via_sg_info_t *vsg) in via_free_sg_info() argument
177 switch (vsg->state) { in via_free_sg_info()
179 via_unmap_blit_from_device(pdev, vsg); in via_free_sg_info()
181 for (i = 0; i < vsg->num_desc_pages; ++i) { in via_free_sg_info()
182 if (vsg->desc_pages[i] != NULL) in via_free_sg_info()
183 free_page((unsigned long)vsg->desc_pages[i]); in via_free_sg_info()
185 kfree(vsg->desc_pages); in via_free_sg_info()
187 for (i = 0; i < vsg->num_pages; ++i) { in via_free_sg_info()
188 if (NULL != (page = vsg->pages[i])) { in via_free_sg_info()
189 if (!PageReserved(page) && (DMA_FROM_DEVICE == vsg->direction)) in via_free_sg_info()
195 vfree(vsg->pages); in via_free_sg_info()
197 vsg->state = dr_via_sg_init; in via_free_sg_info()
199 vfree(vsg->bounce_buffer); in via_free_sg_info()
200 vsg->bounce_buffer = NULL; in via_free_sg_info()
201 vsg->free_on_sequence = 0; in via_free_sg_info()
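The matches from via_free_sg_info() show a state-driven teardown: depending on how far setup got, it unmaps the device mappings, frees the descriptor pages, dirties and releases the pinned user pages, frees the page array, and resets the state before dropping the bounce buffer. A sketch of the switch is below; the fall-through structure, the page-release call (put_page() here) and the dr_via_pages_alloc state name are inferred from the visible lines and from via_lock_all_dma_pages(), not taken verbatim from this revision.

static void
via_free_sg_info(struct pci_dev *pdev, drm_via_sg_info_t *vsg)
{
	struct page *page;
	int i;

	switch (vsg->state) {
	case dr_via_device_mapped:
		via_unmap_blit_from_device(pdev, vsg);
		/* fall through */
	case dr_via_desc_pages_alloc:
		for (i = 0; i < vsg->num_desc_pages; ++i) {
			if (vsg->desc_pages[i] != NULL)
				free_page((unsigned long)vsg->desc_pages[i]);
		}
		kfree(vsg->desc_pages);
		/* fall through */
	case dr_via_pages_locked:
		for (i = 0; i < vsg->num_pages; ++i) {
			if (NULL != (page = vsg->pages[i])) {
				/* Pages the blit engine wrote to must be marked
				 * dirty before they are released. */
				if (!PageReserved(page) &&
				    (DMA_FROM_DEVICE == vsg->direction))
					set_page_dirty(page);
				put_page(page);
			}
		}
		/* fall through */
	case dr_via_pages_alloc:	/* assumed state: page array allocated, nothing pinned */
		vfree(vsg->pages);
		/* fall through */
	default:
		vsg->state = dr_via_sg_init;
	}
	vfree(vsg->bounce_buffer);
	vsg->bounce_buffer = NULL;
	vsg->free_on_sequence = 0;
}

Because every error path below funnels into this function, each setup step only has to record how far it got in vsg->state.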
209 via_fire_dmablit(struct drm_device *dev, drm_via_sg_info_t *vsg, int engine) in via_fire_dmablit() argument
219 VIA_WRITE(VIA_PCI_DMA_DPR0 + engine*0x10, vsg->chain_start); in via_fire_dmablit()
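Only one line of via_fire_dmablit() references vsg: the write that points the chosen engine's descriptor-pointer register at the chain built above. A minimal sketch follows; the surrounding register programming that actually arms and starts the engine uses register names that do not appear in these matches, so it is reduced to comments, and the dev_priv/VIA_WRITE plumbing is assumed from the usual VIA driver conventions.

static void
via_fire_dmablit(struct drm_device *dev, drm_via_sg_info_t *vsg, int engine)
{
	drm_via_private_t *dev_priv = (drm_via_private_t *)dev->dev_private;

	/* ... reset/arm the engine's control and mode registers (names not
	 * visible in the matches above) ... */

	/* Each engine's register block is 0x10 bytes apart; hand the hardware
	 * the bus address of the first chain element. */
	VIA_WRITE(VIA_PCI_DMA_DPR0 + engine * 0x10, vsg->chain_start);

	/* ... start the transfer and flush the posted write ... */
}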
231 via_lock_all_dma_pages(drm_via_sg_info_t *vsg, drm_via_dmablit_t *xfer) in via_lock_all_dma_pages() argument
235 vsg->num_pages = VIA_PFN(xfer->mem_addr + (xfer->num_lines * xfer->mem_stride - 1)) - in via_lock_all_dma_pages()
238 vsg->pages = vzalloc(sizeof(struct page *) * vsg->num_pages); in via_lock_all_dma_pages()
239 if (NULL == vsg->pages) in via_lock_all_dma_pages()
244 vsg->num_pages, in via_lock_all_dma_pages()
245 (vsg->direction == DMA_FROM_DEVICE), in via_lock_all_dma_pages()
246 0, vsg->pages, NULL); in via_lock_all_dma_pages()
249 if (ret != vsg->num_pages) { in via_lock_all_dma_pages()
252 vsg->state = dr_via_pages_locked; in via_lock_all_dma_pages()
255 vsg->state = dr_via_pages_locked; in via_lock_all_dma_pages()
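via_lock_all_dma_pages() computes how many user pages the transfer touches, allocates a page-pointer array, and pins those pages so the engine can DMA to or from them; both the partial-pin and full-success paths end in dr_via_pages_locked, which is what the free path above keys on. The sketch below follows the visible argument fragments, which match the older get_user_pages() calling convention (write flag, force flag, pages, vmas); newer kernels spell this differently (gup_flags, pin_user_pages variants), so treat the call and the mmap_sem locking as version-dependent assumptions. The intermediate dr_via_pages_alloc state is also assumed, not visible in the matches.

static int
via_lock_all_dma_pages(drm_via_sg_info_t *vsg, drm_via_dmablit_t *xfer)
{
	int ret;

	/* Number of user pages spanned by the transfer, including the partial
	 * first and last pages. */
	vsg->num_pages = VIA_PFN(xfer->mem_addr +
				 (xfer->num_lines * xfer->mem_stride - 1)) -
		VIA_PFN(xfer->mem_addr) + 1;

	vsg->pages = vzalloc(sizeof(struct page *) * vsg->num_pages);
	if (NULL == vsg->pages)
		return -ENOMEM;
	vsg->state = dr_via_pages_alloc;	/* assumed intermediate state */

	/* Pin the user pages so they cannot move or be swapped out while the
	 * engine accesses them; a device-to-memory blit writes to them. */
	down_read(&current->mm->mmap_sem);
	ret = get_user_pages((unsigned long)xfer->mem_addr,
			     vsg->num_pages,
			     (vsg->direction == DMA_FROM_DEVICE),
			     0, vsg->pages, NULL);
	up_read(&current->mm->mmap_sem);

	if (ret != vsg->num_pages) {
		if (ret < 0)
			return ret;
		/* Partial pin: record the state so via_free_sg_info() releases
		 * whatever was pinned. */
		vsg->state = dr_via_pages_locked;
		return -EINVAL;
	}
	vsg->state = dr_via_pages_locked;
	return 0;
}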
267 via_alloc_desc_pages(drm_via_sg_info_t *vsg) in via_alloc_desc_pages() argument
271 vsg->descriptors_per_page = PAGE_SIZE / sizeof(drm_via_descriptor_t); in via_alloc_desc_pages()
272 vsg->num_desc_pages = (vsg->num_desc + vsg->descriptors_per_page - 1) / in via_alloc_desc_pages()
273 vsg->descriptors_per_page; in via_alloc_desc_pages()
275 if (NULL == (vsg->desc_pages = kcalloc(vsg->num_desc_pages, sizeof(void *), GFP_KERNEL))) in via_alloc_desc_pages()
278 vsg->state = dr_via_desc_pages_alloc; in via_alloc_desc_pages()
279 for (i = 0; i < vsg->num_desc_pages; ++i) { in via_alloc_desc_pages()
280 if (NULL == (vsg->desc_pages[i] = in via_alloc_desc_pages()
284 DRM_DEBUG("Allocated %d pages for %d descriptors.\n", vsg->num_desc_pages, in via_alloc_desc_pages()
285 vsg->num_desc); in via_alloc_desc_pages()
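via_alloc_desc_pages() sizes the descriptor storage from the count produced by the mode-0 mapping pass: PAGE_SIZE / sizeof(drm_via_descriptor_t) descriptors fit in one page, and enough whole pages are allocated to hold vsg->num_desc of them. The sketch below fills in the allocation inside the loop; __get_free_page(GFP_KERNEL) is inferred from the free_page() call visible in via_free_sg_info(), and the error returns are assumptions.

static int
via_alloc_desc_pages(drm_via_sg_info_t *vsg)
{
	int i;

	vsg->descriptors_per_page = PAGE_SIZE / sizeof(drm_via_descriptor_t);
	vsg->num_desc_pages = (vsg->num_desc + vsg->descriptors_per_page - 1) /
		vsg->descriptors_per_page;

	if (NULL == (vsg->desc_pages = kcalloc(vsg->num_desc_pages, sizeof(void *), GFP_KERNEL)))
		return -ENOMEM;

	vsg->state = dr_via_desc_pages_alloc;
	for (i = 0; i < vsg->num_desc_pages; ++i) {
		/* One full page per batch of descriptors; released with
		 * free_page() in via_free_sg_info(). */
		if (NULL == (vsg->desc_pages[i] =
			     (drm_via_descriptor_t *)__get_free_page(GFP_KERNEL)))
			return -ENOMEM;
	}
	DRM_DEBUG("Allocated %d pages for %d descriptors.\n", vsg->num_desc_pages,
		  vsg->num_desc);
	return 0;
}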
578 via_build_sg_info(struct drm_device *dev, drm_via_sg_info_t *vsg, drm_via_dmablit_t *xfer) in via_build_sg_info() argument
583 vsg->direction = (draw) ? DMA_TO_DEVICE : DMA_FROM_DEVICE; in via_build_sg_info()
584 vsg->bounce_buffer = NULL; in via_build_sg_info()
586 vsg->state = dr_via_sg_init; in via_build_sg_info()
657 if (0 != (ret = via_lock_all_dma_pages(vsg, xfer))) { in via_build_sg_info()
659 via_free_sg_info(dev->pdev, vsg); in via_build_sg_info()
663 via_map_blit_for_device(dev->pdev, xfer, vsg, 0); in via_build_sg_info()
664 if (0 != (ret = via_alloc_desc_pages(vsg))) { in via_build_sg_info()
666 via_free_sg_info(dev->pdev, vsg); in via_build_sg_info()
669 via_map_blit_for_device(dev->pdev, xfer, vsg, 1); in via_build_sg_info()
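via_build_sg_info() initializes the vsg object (direction from draw, no bounce buffer, initial state) and then, after argument checking that never touches vsg (source lines 587-656, hence the gap in the matches), drives the whole pipeline: pin the pages, run the mapping pass in counting mode, allocate descriptor pages, run the mapping pass again to build the chain. The sketch below strings the matched calls together; draw is a local derived from the transfer request whose definition is not visible here, error reporting is omitted, and cleanup on failure relies on the state machine in via_free_sg_info().

	/* Early setup (source lines 583-586). */
	vsg->direction = (draw) ? DMA_TO_DEVICE : DMA_FROM_DEVICE;
	vsg->bounce_buffer = NULL;
	vsg->state = dr_via_sg_init;

	/* ... argument validation and stride fix-ups, no vsg references ... */

	/* Pin the user pages; on failure the state machine cleans up. */
	if (0 != (ret = via_lock_all_dma_pages(vsg, xfer))) {
		via_free_sg_info(dev->pdev, vsg);
		return ret;
	}

	/* First pass: count descriptors only (mode 0). */
	via_map_blit_for_device(dev->pdev, xfer, vsg, 0);

	/* Allocate enough pages to hold that many descriptors. */
	if (0 != (ret = via_alloc_desc_pages(vsg))) {
		via_free_sg_info(dev->pdev, vsg);
		return ret;
	}

	/* Second pass: map the pages and write the descriptor chain (mode 1). */
	via_map_blit_for_device(dev->pdev, xfer, vsg, 1);

	return 0;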
728 drm_via_sg_info_t *vsg; in via_dmablit() local
743 if (NULL == (vsg = kmalloc(sizeof(*vsg), GFP_KERNEL))) { in via_dmablit()
747 if (0 != (ret = via_build_sg_info(dev, vsg, xfer))) { in via_dmablit()
749 kfree(vsg); in via_dmablit()
754 blitq->blits[blitq->head++] = vsg; in via_dmablit()
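Finally, via_dmablit() is the entry point visible here: it allocates a fresh drm_via_sg_info_t, builds and maps the scatter/gather state, and queues it on the engine's blit queue; on failure the object is freed immediately. The sketch below shows only the vsg handling; the blit-queue locking, slot reservation and fence bookkeeping around it do not reference vsg, so they are reduced to comments and their exact form is an assumption.

	drm_via_sg_info_t *vsg;
	int ret;

	/* ... look up the engine's blit queue and reserve a free slot
	 * (blitq, locking and throttling omitted) ... */

	if (NULL == (vsg = kmalloc(sizeof(*vsg), GFP_KERNEL))) {
		/* give the reserved slot back (not shown) */
		return -ENOMEM;
	}

	if (0 != (ret = via_build_sg_info(dev, vsg, xfer))) {
		/* give the reserved slot back (not shown) */
		kfree(vsg);
		return ret;
	}

	/* Hand the fully mapped job to the queue; the interrupt/workqueue side
	 * fires it with via_fire_dmablit() and eventually releases it through
	 * via_free_sg_info(). */
	blitq->blits[blitq->head++] = vsg;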