ttm_dma 542 drivers/gpu/drm/nouveau/nouveau_bo.c struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
ttm_dma 545 drivers/gpu/drm/nouveau/nouveau_bo.c if (!ttm_dma)
ttm_dma 552 drivers/gpu/drm/nouveau/nouveau_bo.c for (i = 0; i < ttm_dma->ttm.num_pages; i++)
ttm_dma 554 drivers/gpu/drm/nouveau/nouveau_bo.c ttm_dma->dma_address[i],
ttm_dma 562 drivers/gpu/drm/nouveau/nouveau_bo.c struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm;
ttm_dma 565 drivers/gpu/drm/nouveau/nouveau_bo.c if (!ttm_dma)
ttm_dma 572 drivers/gpu/drm/nouveau/nouveau_bo.c for (i = 0; i < ttm_dma->ttm.num_pages; i++)
ttm_dma 573 drivers/gpu/drm/nouveau/nouveau_bo.c dma_sync_single_for_cpu(drm->dev->dev, ttm_dma->dma_address[i],
ttm_dma 1586 drivers/gpu/drm/nouveau/nouveau_bo.c struct ttm_dma_tt *ttm_dma = (void *)ttm;
ttm_dma 1599 drivers/gpu/drm/nouveau/nouveau_bo.c ttm_dma->dma_address, ttm->num_pages);
ttm_dma 1632 drivers/gpu/drm/nouveau/nouveau_bo.c dma_unmap_page(dev, ttm_dma->dma_address[i],
ttm_dma 1634 drivers/gpu/drm/nouveau/nouveau_bo.c ttm_dma->dma_address[i] = 0;
ttm_dma 1640 drivers/gpu/drm/nouveau/nouveau_bo.c ttm_dma->dma_address[i] = addr;
ttm_dma 1648 drivers/gpu/drm/nouveau/nouveau_bo.c struct ttm_dma_tt *ttm_dma = (void *)ttm;
ttm_dma 1675 drivers/gpu/drm/nouveau/nouveau_bo.c if (ttm_dma->dma_address[i]) {
ttm_dma 1676 drivers/gpu/drm/nouveau/nouveau_bo.c dma_unmap_page(dev, ttm_dma->dma_address[i], PAGE_SIZE,
ttm_dma 835 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c struct ttm_dma_tt *ttm_dma,
ttm_dma 839 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 848 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma->dma_address[index] = d_page->dma;
ttm_dma 849 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c list_move_tail(&d_page->page_list, &ttm_dma->pages_list);
ttm_dma 857 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c static gfp_t ttm_dma_pool_gfp_flags(struct ttm_dma_tt *ttm_dma, bool huge)
ttm_dma 859 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 886 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c int ttm_dma_populate(struct ttm_dma_tt *ttm_dma, struct device *dev,
ttm_dma 889 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 904 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c INIT_LIST_HEAD(&ttm_dma->pages_list);
ttm_dma 915 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c gfp_t gfp_flags = ttm_dma_pool_gfp_flags(ttm_dma, true);
ttm_dma 925 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c d_page = ttm_dma_pool_get_pages(pool, ttm_dma, i);
ttm_dma 932 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_unpopulate(ttm_dma, dev);
ttm_dma 939 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma->dma_address[j] = ttm_dma->dma_address[j - 1] +
ttm_dma 952 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c gfp_t gfp_flags = ttm_dma_pool_gfp_flags(ttm_dma, false);
ttm_dma 960 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c d_page = ttm_dma_pool_get_pages(pool, ttm_dma, i);
ttm_dma 962 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_unpopulate(ttm_dma, dev);
ttm_dma 969 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_unpopulate(ttm_dma, dev);
ttm_dma 981 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma_unpopulate(ttm_dma, dev);
ttm_dma 992 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c void ttm_dma_unpopulate(struct ttm_dma_tt *ttm_dma, struct device *dev)
ttm_dma 994 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 1009 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c list_for_each_entry_safe(d_page, next, &ttm_dma->pages_list,
ttm_dma 1039 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c list_for_each_entry_safe(d_page, next, &ttm_dma->pages_list,
ttm_dma 1060 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c list_splice(&ttm_dma->pages_list, &pool->free_list);
ttm_dma 1071 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c INIT_LIST_HEAD(&ttm_dma->pages_list);
ttm_dma 1074 drivers/gpu/drm/ttm/ttm_page_alloc_dma.c ttm_dma->dma_address[i] = 0;
ttm_dma 259 drivers/gpu/drm/ttm/ttm_tt.c int ttm_dma_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
ttm_dma 262 drivers/gpu/drm/ttm/ttm_tt.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 266 drivers/gpu/drm/ttm/ttm_tt.c INIT_LIST_HEAD(&ttm_dma->pages_list);
ttm_dma 267 drivers/gpu/drm/ttm/ttm_tt.c if (ttm_dma_tt_alloc_page_directory(ttm_dma)) {
ttm_dma 276 drivers/gpu/drm/ttm/ttm_tt.c int ttm_sg_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
ttm_dma 279 drivers/gpu/drm/ttm/ttm_tt.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 284 drivers/gpu/drm/ttm/ttm_tt.c INIT_LIST_HEAD(&ttm_dma->pages_list);
ttm_dma 286 drivers/gpu/drm/ttm/ttm_tt.c ret = ttm_sg_tt_alloc_page_directory(ttm_dma);
ttm_dma 288 drivers/gpu/drm/ttm/ttm_tt.c ret = ttm_dma_tt_alloc_page_directory(ttm_dma);
ttm_dma 298 drivers/gpu/drm/ttm/ttm_tt.c void ttm_dma_tt_fini(struct ttm_dma_tt *ttm_dma)
ttm_dma 300 drivers/gpu/drm/ttm/ttm_tt.c struct ttm_tt *ttm = &ttm_dma->ttm;
ttm_dma 305 drivers/gpu/drm/ttm/ttm_tt.c kvfree(ttm_dma->dma_address);
ttm_dma 307 drivers/gpu/drm/ttm/ttm_tt.c ttm_dma->dma_address = NULL;
ttm_dma 93 include/drm/ttm/ttm_page_alloc.h int ttm_dma_populate(struct ttm_dma_tt *ttm_dma, struct device *dev,
ttm_dma 95 include/drm/ttm/ttm_page_alloc.h void ttm_dma_unpopulate(struct ttm_dma_tt *ttm_dma, struct device *dev);
ttm_dma 110 include/drm/ttm/ttm_page_alloc.h static inline int ttm_dma_populate(struct ttm_dma_tt *ttm_dma,
ttm_dma 116 include/drm/ttm/ttm_page_alloc.h static inline void ttm_dma_unpopulate(struct ttm_dma_tt *ttm_dma,
ttm_dma 163 include/drm/ttm/ttm_tt.h int ttm_dma_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
ttm_dma 165 include/drm/ttm/ttm_tt.h int ttm_sg_tt_init(struct ttm_dma_tt *ttm_dma, struct ttm_buffer_object *bo,
ttm_dma 176 include/drm/ttm/ttm_tt.h void ttm_dma_tt_fini(struct ttm_dma_tt *ttm_dma);
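
Taken together, these hits trace the full ttm_dma_tt lifecycle: ttm_dma_tt_init()/ttm_sg_tt_init() allocate the dma_address page directory (ttm_tt.c), ttm_dma_populate()/ttm_dma_unpopulate() fill and drain it from the coherent DMA page pool (ttm_page_alloc_dma.c), and nouveau walks dma_address[] to sync or map pages by hand (nouveau_bo.c). The sketch below ties the calls together from a driver's point of view; it is illustrative only, the example_* names are invented, and the struct ttm_operation_ctx third argument to ttm_dma_populate() is an assumption inferred from the truncated prototype at ttm_page_alloc.h:93.

/*
 * Hypothetical driver-side use of the ttm_dma_tt API indexed above.
 * Not taken from any listed file; a sketch of how the pieces fit.
 */
#include <drm/ttm/ttm_bo_api.h>
#include <drm/ttm/ttm_tt.h>
#include <drm/ttm/ttm_page_alloc.h>
#include <linux/dma-mapping.h>
#include <linux/slab.h>

static int example_tt_populate(struct ttm_tt *ttm, struct device *dev,
			       struct ttm_operation_ctx *ctx)
{
	/* Container cast, as nouveau does at nouveau_bo.c:1586. */
	struct ttm_dma_tt *ttm_dma = (void *)ttm;
	unsigned long i;
	int ret;

	/* Allocate pages and fill ttm_dma->dma_address[] from the pool. */
	ret = ttm_dma_populate(ttm_dma, dev, ctx);
	if (ret)
		return ret;

	/* Flush CPU writes per page, mirroring the loop at
	 * nouveau_bo.c:552 (the reverse direction is synced for the CPU
	 * at nouveau_bo.c:572). */
	for (i = 0; i < ttm_dma->ttm.num_pages; i++)
		dma_sync_single_for_device(dev, ttm_dma->dma_address[i],
					   PAGE_SIZE, DMA_TO_DEVICE);
	return 0;
}

static void example_tt_destroy(struct ttm_tt *ttm, struct device *dev)
{
	struct ttm_dma_tt *ttm_dma = (void *)ttm;

	/* Return the pages to the pool and zero dma_address[]
	 * (ttm_page_alloc_dma.c:992-1074). */
	ttm_dma_unpopulate(ttm_dma, dev);
	/* Free the page directory itself (ttm_tt.c:298-307). */
	ttm_dma_tt_fini(ttm_dma);
	kfree(ttm_dma);
}

A driver of this era would typically call ttm_dma_tt_init() on a kzalloc'd struct ttm_dma_tt in its ttm_tt_create hook and wire functions like the two above into its populate/unpopulate and destroy callbacks; the nouveau hits at lines 1632-1676 show the alternative, where the driver bypasses the pool and dma_map_page()/dma_unmap_page()s each entry of dma_address[] itself.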