
Searched refs:ttm (Results 1 – 55 of 55) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/ttm/
ttm_tt.c
51 static void ttm_tt_alloc_page_directory(struct ttm_tt *ttm) in ttm_tt_alloc_page_directory() argument
53 ttm->pages = drm_calloc_large(ttm->num_pages, sizeof(void*)); in ttm_tt_alloc_page_directory()
56 static void ttm_dma_tt_alloc_page_directory(struct ttm_dma_tt *ttm) in ttm_dma_tt_alloc_page_directory() argument
58 ttm->ttm.pages = drm_calloc_large(ttm->ttm.num_pages, in ttm_dma_tt_alloc_page_directory()
59 sizeof(*ttm->ttm.pages) + in ttm_dma_tt_alloc_page_directory()
60 sizeof(*ttm->dma_address) + in ttm_dma_tt_alloc_page_directory()
61 sizeof(*ttm->cpu_address)); in ttm_dma_tt_alloc_page_directory()
62 ttm->cpu_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages); in ttm_dma_tt_alloc_page_directory()
63 ttm->dma_address = (void *) (ttm->cpu_address + ttm->ttm.num_pages); in ttm_dma_tt_alloc_page_directory()
106 static int ttm_tt_set_caching(struct ttm_tt *ttm, in ttm_tt_set_caching() argument
[all …]
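
The ttm_dma_tt_alloc_page_directory() hits above pack three parallel per-page arrays into a single drm_calloc_large() allocation. A commented copy of that carving, as a sketch of why the pointer arithmetic works:

static void ttm_dma_tt_alloc_page_directory(struct ttm_dma_tt *ttm)
{
	/* one allocation sized num_pages * (sizeof(struct page *) +
	 * sizeof(dma_addr_t) + sizeof(void *)), carved into three arrays */
	ttm->ttm.pages = drm_calloc_large(ttm->ttm.num_pages,
					  sizeof(*ttm->ttm.pages) +
					  sizeof(*ttm->dma_address) +
					  sizeof(*ttm->cpu_address));
	/* pages is a struct page **, so "+ num_pages" lands exactly past
	 * the pages[] array; cpu_address[] starts there */
	ttm->cpu_address = (void *) (ttm->ttm.pages + ttm->ttm.num_pages);
	/* and dma_address[] starts past cpu_address[] in the same block */
	ttm->dma_address = (void *) (ttm->cpu_address + ttm->ttm.num_pages);
}
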
ttm_agp_backend.c
46 struct ttm_tt ttm; member
51 static int ttm_agp_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem) in ttm_agp_bind() argument
53 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_bind()
59 mem = agp_allocate_memory(agp_be->bridge, ttm->num_pages, AGP_USER_MEMORY); in ttm_agp_bind()
64 for (i = 0; i < ttm->num_pages; i++) { in ttm_agp_bind()
65 struct page *page = ttm->pages[i]; in ttm_agp_bind()
68 page = ttm->dummy_read_page; in ttm_agp_bind()
84 static int ttm_agp_unbind(struct ttm_tt *ttm) in ttm_agp_unbind() argument
86 struct ttm_agp_backend *agp_be = container_of(ttm, struct ttm_agp_backend, ttm); in ttm_agp_unbind()
97 static void ttm_agp_destroy(struct ttm_tt *ttm) in ttm_agp_destroy() argument
[all …]
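
Every callback in ttm_agp_backend.c uses the same idiom: struct ttm_agp_backend embeds struct ttm_tt as a member and recovers the enclosing struct with container_of(). A minimal sketch of the unbind path under that layout (AGP details trimmed; treat it as an approximation of the 4.4 code, not a verbatim copy):

struct ttm_agp_backend {
	struct ttm_tt ttm;		/* embedded base, named "ttm" */
	struct agp_memory *mem;
	struct agp_bridge_data *bridge;
};

static int ttm_agp_unbind(struct ttm_tt *ttm)
{
	/* walk back from the embedded member to the enclosing struct;
	 * correct for any member offset, unlike a plain cast */
	struct ttm_agp_backend *agp_be =
		container_of(ttm, struct ttm_agp_backend, ttm);

	if (agp_be->mem) {
		agp_free_memory(agp_be->mem);	/* AGP core API */
		agp_be->mem = NULL;
	}
	return 0;
}
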
ttm_bo_util.c
51 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_ttm() local
56 ttm_tt_unbind(ttm); in ttm_bo_move_ttm()
63 ret = ttm_tt_set_placement_caching(ttm, new_mem->placement); in ttm_bo_move_ttm()
68 ret = ttm_tt_bind(ttm, new_mem); in ttm_bo_move_ttm()
250 static int ttm_copy_io_ttm_page(struct ttm_tt *ttm, void *src, in ttm_copy_io_ttm_page() argument
254 struct page *d = ttm->pages[page]; in ttm_copy_io_ttm_page()
287 static int ttm_copy_ttm_io_page(struct ttm_tt *ttm, void *dst, in ttm_copy_ttm_io_page() argument
291 struct page *s = ttm->pages[page]; in ttm_copy_ttm_io_page()
329 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_memcpy() local
357 (ttm == NULL || (ttm->state == tt_unpopulated && in ttm_bo_move_memcpy()
[all …]
ttm_page_alloc.c
864 int ttm_pool_populate(struct ttm_tt *ttm) in ttm_pool_populate() argument
866 struct ttm_mem_global *mem_glob = ttm->glob->mem_glob; in ttm_pool_populate()
870 if (ttm->state != tt_unpopulated) in ttm_pool_populate()
873 for (i = 0; i < ttm->num_pages; ++i) { in ttm_pool_populate()
874 ret = ttm_get_pages(&ttm->pages[i], 1, in ttm_pool_populate()
875 ttm->page_flags, in ttm_pool_populate()
876 ttm->caching_state); in ttm_pool_populate()
878 ttm_pool_unpopulate(ttm); in ttm_pool_populate()
882 ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i], in ttm_pool_populate()
885 ttm_pool_unpopulate(ttm); in ttm_pool_populate()
[all …]
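
ttm_pool_populate() above is a textbook allocate-or-unwind loop: each page is taken from the pool and charged to the global memory accounting, and any mid-loop failure unpopulates everything allocated so far. A trimmed sketch of that control flow (swap-in handling abbreviated from the 4.4 function):

int ttm_pool_populate(struct ttm_tt *ttm)
{
	struct ttm_mem_global *mem_glob = ttm->glob->mem_glob;
	unsigned i;
	int ret;

	if (ttm->state != tt_unpopulated)
		return 0;		/* already backed by pages */

	for (i = 0; i < ttm->num_pages; ++i) {
		ret = ttm_get_pages(&ttm->pages[i], 1,
				    ttm->page_flags,
				    ttm->caching_state);
		if (ret != 0) {
			ttm_pool_unpopulate(ttm);	/* frees pages 0..i-1 */
			return -ENOMEM;
		}
		/* charge the fresh page against the global accounting;
		 * on failure unwind the whole directory, not just page i */
		ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i],
						false, false);
		if (unlikely(ret != 0)) {
			ttm_pool_unpopulate(ttm);
			return -ENOMEM;
		}
	}

	ttm->state = tt_unbound;
	return 0;
}
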
ttm_page_alloc_dma.c
852 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_pool_get_pages() local
860 ttm->pages[index] = d_page->p; in ttm_dma_pool_get_pages()
878 struct ttm_tt *ttm = &ttm_dma->ttm; in ttm_dma_populate() local
879 struct ttm_mem_global *mem_glob = ttm->glob->mem_glob; in ttm_dma_populate()
886 if (ttm->state != tt_unpopulated) in ttm_dma_populate()
889 type = ttm_to_type(ttm->page_flags, ttm->caching_state); in ttm_dma_populate()
890 if (ttm->page_flags & TTM_PAGE_FLAG_DMA32) in ttm_dma_populate()
894 if (ttm->page_flags & TTM_PAGE_FLAG_ZERO_ALLOC) in ttm_dma_populate()
906 for (i = 0; i < ttm->num_pages; ++i) { in ttm_dma_populate()
913 ret = ttm_mem_global_alloc_page(mem_glob, ttm->pages[i], in ttm_dma_populate()
[all …]
ttm_bo_vm.c
94 struct ttm_tt *ttm = NULL; in ttm_bo_vm_fault() local
136 if (bo->ttm && (bo->ttm->page_flags & TTM_PAGE_FLAG_SG)) { in ttm_bo_vm_fault()
199 ttm = bo->ttm; in ttm_bo_vm_fault()
204 if (ttm->bdev->driver->ttm_tt_populate(ttm)) { in ttm_bo_vm_fault()
218 page = ttm->pages[page_offset]; in ttm_bo_vm_fault()
Makefile
5 ttm-y := ttm_agp_backend.o ttm_memory.o ttm_tt.o ttm_bo.o \
10 obj-$(CONFIG_DRM_TTM) += ttm.o
ttm_bo.c
150 if (bo->ttm) in ttm_bo_release_list()
151 ttm_tt_destroy(bo->ttm); in ttm_bo_release_list()
179 if (bo->ttm != NULL) { in ttm_bo_add_to_lru()
242 bo->ttm = NULL; in ttm_bo_add_ttm()
252 bo->ttm = bdev->driver->ttm_tt_create(bdev, bo->num_pages << PAGE_SHIFT, in ttm_bo_add_ttm()
254 if (unlikely(bo->ttm == NULL)) in ttm_bo_add_ttm()
258 bo->ttm = bdev->driver->ttm_tt_create(bdev, bo->num_pages << PAGE_SHIFT, in ttm_bo_add_ttm()
261 if (unlikely(bo->ttm == NULL)) { in ttm_bo_add_ttm()
265 bo->ttm->sg = bo->sg; in ttm_bo_add_ttm()
302 if (bo->ttm == NULL) { in ttm_bo_handle_move_mem()
[all …]
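
The ttm_bo_add_ttm() hits at 242/252/258/265 show TTM choosing a backing strategy per buffer type: no TT at all, a driver-created TT, or an SG-flagged TT that borrows the importer's scatter/gather table. A condensed sketch of that dispatch (an approximation of the 4.4 switch, not a verbatim copy):

switch (bo->type) {
case ttm_bo_type_kernel:
case ttm_bo_type_device:
	bo->ttm = bdev->driver->ttm_tt_create(bdev,
					      bo->num_pages << PAGE_SHIFT,
					      page_flags, glob->dummy_read_page);
	if (unlikely(bo->ttm == NULL))
		ret = -ENOMEM;
	break;
case ttm_bo_type_sg:
	/* dma-buf import: flag the TT as SG-backed and hand it the
	 * caller's sg table instead of allocating fresh pages */
	bo->ttm = bdev->driver->ttm_tt_create(bdev,
					      bo->num_pages << PAGE_SHIFT,
					      page_flags | TTM_PAGE_FLAG_SG,
					      glob->dummy_read_page);
	if (unlikely(bo->ttm == NULL)) {
		ret = -ENOMEM;
		break;
	}
	bo->ttm->sg = bo->sg;
	break;
default:
	ret = -EINVAL;
	break;
}
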
/linux-4.4.14/drivers/gpu/drm/nouveau/
nouveau_sgdma.c
11 struct ttm_dma_tt ttm; member
16 nouveau_sgdma_destroy(struct ttm_tt *ttm) in nouveau_sgdma_destroy() argument
18 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nouveau_sgdma_destroy()
20 if (ttm) { in nouveau_sgdma_destroy()
21 ttm_dma_tt_fini(&nvbe->ttm); in nouveau_sgdma_destroy()
27 nv04_sgdma_bind(struct ttm_tt *ttm, struct ttm_mem_reg *mem) in nv04_sgdma_bind() argument
29 struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm; in nv04_sgdma_bind()
32 if (ttm->sg) { in nv04_sgdma_bind()
33 node->sg = ttm->sg; in nv04_sgdma_bind()
37 node->pages = nvbe->ttm.dma_address; in nv04_sgdma_bind()
[all …]
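
Unlike the AGP backend, nouveau_sgdma.c recovers its private struct with a plain cast. That is only safe because of member ordering, which is worth calling out; a sketch (second member elided):

struct nouveau_sgdma_be {
	/* must stay the first member: the (struct nouveau_sgdma_be *)ttm
	 * casts in this file rely on (struct ttm_tt *)nvbe == &nvbe->ttm.ttm,
	 * which holds only because ttm_dma_tt in turn starts with its ttm_tt */
	struct ttm_dma_tt ttm;
	/* ... driver state ... */
};

static void
nouveau_sgdma_destroy(struct ttm_tt *ttm)
{
	struct nouveau_sgdma_be *nvbe = (struct nouveau_sgdma_be *)ttm;

	if (ttm) {
		ttm_dma_tt_fini(&nvbe->ttm);
		kfree(nvbe);
	}
}
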
nouveau_ttm.c
275 return ttm_bo_mmap(filp, vma, &drm->ttm.bdev); in nouveau_ttm_mmap()
296 global_ref = &drm->ttm.mem_global_ref; in nouveau_ttm_global_init()
305 drm->ttm.mem_global_ref.release = NULL; in nouveau_ttm_global_init()
309 drm->ttm.bo_global_ref.mem_glob = global_ref->object; in nouveau_ttm_global_init()
310 global_ref = &drm->ttm.bo_global_ref.ref; in nouveau_ttm_global_init()
319 drm_global_item_unref(&drm->ttm.mem_global_ref); in nouveau_ttm_global_init()
320 drm->ttm.mem_global_ref.release = NULL; in nouveau_ttm_global_init()
330 if (drm->ttm.mem_global_ref.release == NULL) in nouveau_ttm_global_release()
333 drm_global_item_unref(&drm->ttm.bo_global_ref.ref); in nouveau_ttm_global_release()
334 drm_global_item_unref(&drm->ttm.mem_global_ref); in nouveau_ttm_global_release()
[all …]
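
The mem_global_ref/bo_global_ref sequence in nouveau_ttm.c, repeated nearly verbatim in the cirrus, ast, mgag200 and bochs hits below, is the standard boilerplate for refcounting one shared ttm_mem_global and ttm_bo_global across devices. A condensed sketch, assuming the usual driver-local wrappers around ttm_mem_global_init()/release():

static int nouveau_ttm_global_init(struct nouveau_drm *drm)
{
	struct drm_global_reference *global_ref;
	int ret;

	global_ref = &drm->ttm.mem_global_ref;
	global_ref->global_type = DRM_GLOBAL_TTM_MEM;
	global_ref->size = sizeof(struct ttm_mem_global);
	global_ref->init = &nouveau_ttm_mem_global_init;	/* driver wrapper */
	global_ref->release = &nouveau_ttm_mem_global_release;

	/* refcounted: only the first caller actually runs ->init */
	ret = drm_global_item_ref(global_ref);
	if (unlikely(ret != 0))
		return ret;

	drm->ttm.bo_global_ref.mem_glob = global_ref->object;
	global_ref = &drm->ttm.bo_global_ref.ref;
	global_ref->global_type = DRM_GLOBAL_TTM_BO;
	global_ref->size = sizeof(struct ttm_bo_global);
	global_ref->init = &ttm_bo_global_init;
	global_ref->release = &ttm_bo_global_release;

	ret = drm_global_item_ref(global_ref);
	if (unlikely(ret != 0)) {
		/* unwind the mem global ref taken above */
		drm_global_item_unref(&drm->ttm.mem_global_ref);
		return ret;
	}
	return 0;
}
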
nouveau_bo.c
210 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
225 acc_size = ttm_bo_dma_acc_size(&drm->ttm.bdev, size, in nouveau_bo_new()
228 ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
458 struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_device()
468 for (i = 0; i < ttm_dma->ttm.num_pages; i++) in nouveau_bo_sync_for_device()
478 struct ttm_dma_tt *ttm_dma = (struct ttm_dma_tt *)nvbo->bo.ttm; in nouveau_bo_sync_for_cpu()
488 for (i = 0; i < ttm_dma->ttm.num_pages; i++) in nouveau_bo_sync_for_cpu()
522 dma_tt = (struct ttm_dma_tt *)nvbo->bo.ttm; in _nouveau_bo_mem_index()
1060 struct nouveau_channel *chan = drm->ttm.chan; in nouveau_bo_move_m2mf()
1078 ret = drm->ttm.move(chan, bo, &bo->mem, new_mem); in nouveau_bo_move_m2mf()
[all …]
nouveau_ttm.h
7 return container_of(bd, struct nouveau_drm, ttm.bdev); in nouveau_bdev()
nouveau_prime.c
36 return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages); in nouveau_gem_prime_get_sg_table()
nouveau_drm.h
134 } ttm; member
nouveau_drm.c
149 nvif_object_fini(&drm->ttm.copy); in nouveau_accel_fini()
547 ttm_bo_evict_mm(&drm->ttm.bdev, TTM_PL_VRAM); in nouveau_do_suspend()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
amdgpu_ttm.c
306 r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement); in amdgpu_move_vram_ram()
311 r = ttm_tt_bind(bo->ttm, &tmp_mem); in amdgpu_move_vram_ram()
375 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in amdgpu_bo_move()
482 struct ttm_dma_tt ttm; member
491 static int amdgpu_ttm_tt_pin_userptr(struct ttm_tt *ttm) in amdgpu_ttm_tt_pin_userptr() argument
493 struct amdgpu_device *adev = amdgpu_get_adev(ttm->bdev); in amdgpu_ttm_tt_pin_userptr()
494 struct amdgpu_ttm_tt *gtt = (void *)ttm; in amdgpu_ttm_tt_pin_userptr()
508 unsigned long end = gtt->userptr + ttm->num_pages * PAGE_SIZE; in amdgpu_ttm_tt_pin_userptr()
517 unsigned num_pages = ttm->num_pages - pinned; in amdgpu_ttm_tt_pin_userptr()
519 struct page **pages = ttm->pages + pinned; in amdgpu_ttm_tt_pin_userptr()
[all …]
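
amdgpu_ttm_tt_pin_userptr() above pins the user's address range in chunks until all ttm->num_pages are resident. A rough sketch of that loop, assuming the 4.4-era get_user_pages() signature (the task/mm arguments were dropped in later kernels; error unwind trimmed):

	unsigned pinned = 0;
	int write = !(gtt->userflags & AMDGPU_GEM_USERPTR_READONLY);
	long r;

	do {
		unsigned num_pages = ttm->num_pages - pinned;
		uint64_t userptr = gtt->userptr + pinned * PAGE_SIZE;
		struct page **pages = ttm->pages + pinned;

		/* returns how many pages were pinned, which may be
		 * fewer than requested; loop until the range is done */
		r = get_user_pages(current, current->mm, userptr,
				   num_pages, write, 0, pages, NULL);
		if (r < 0)
			goto release_pages;	/* unpin pages[0..pinned-1] */

		pinned += r;
	} while (pinned < ttm->num_pages);
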
amdgpu_prime.c
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in amdgpu_gem_prime_get_sg_table()
124 if (amdgpu_ttm_tt_has_userptr(bo->tbo.ttm)) in amdgpu_gem_prime_export()
amdgpu_gem.c
255 r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
311 if (amdgpu_ttm_tt_has_userptr(robj->tbo.ttm) || in amdgpu_mode_dumb_mmap()
642 if (amdgpu_ttm_tt_has_userptr(robj->tbo.ttm)) { in amdgpu_gem_op_ioctl()
amdgpu_bo_list.c
114 if (amdgpu_ttm_tt_has_userptr(entry->robj->tbo.ttm)) { in amdgpu_bo_list_set()
amdgpu_mn.c
145 if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, in amdgpu_mn_invalidate_range_start()
amdgpu.h
2313 int amdgpu_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
2315 bool amdgpu_ttm_tt_has_userptr(struct ttm_tt *ttm);
2316 bool amdgpu_ttm_tt_affect_userptr(struct ttm_tt *ttm, unsigned long start,
2318 bool amdgpu_ttm_tt_is_readonly(struct ttm_tt *ttm);
2319 uint32_t amdgpu_ttm_tt_pte_flags(struct amdgpu_device *adev, struct ttm_tt *ttm,
amdgpu_object.c
370 if (amdgpu_ttm_tt_has_userptr(bo->tbo.ttm)) in amdgpu_bo_pin_restricted()
amdgpu_cs.c
145 if (amdgpu_ttm_tt_has_userptr(p->uf.bo->tbo.ttm)) { in amdgpu_cs_user_fence_chunk()
amdgpu_vm.c
839 flags = amdgpu_ttm_tt_pte_flags(adev, bo_va->bo->tbo.ttm, mem); in amdgpu_vm_bo_update()
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_ttm.c
238 if (radeon_ttm_tt_has_userptr(bo->ttm)) in radeon_verify_access()
337 r = ttm_tt_set_placement_caching(bo->ttm, tmp_mem.placement); in radeon_move_vram_ram()
342 r = ttm_tt_bind(bo->ttm, &tmp_mem); in radeon_move_vram_ram()
406 if (old_mem->mem_type == TTM_PL_SYSTEM && bo->ttm == NULL) { in radeon_bo_move()
520 struct ttm_dma_tt ttm; member
530 static int radeon_ttm_tt_pin_userptr(struct ttm_tt *ttm) in radeon_ttm_tt_pin_userptr() argument
532 struct radeon_device *rdev = radeon_get_rdev(ttm->bdev); in radeon_ttm_tt_pin_userptr()
533 struct radeon_ttm_tt *gtt = (void *)ttm; in radeon_ttm_tt_pin_userptr()
547 unsigned long end = gtt->userptr + ttm->num_pages * PAGE_SIZE; in radeon_ttm_tt_pin_userptr()
555 unsigned num_pages = ttm->num_pages - pinned; in radeon_ttm_tt_pin_userptr()
[all …]
radeon_prime.c
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
124 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_gem_prime_export()
radeon_mn.c
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
radeon_gem.c
323 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
412 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) { in radeon_mode_dumb_mmap()
712 if (radeon_ttm_tt_has_userptr(robj->tbo.ttm)) in radeon_gem_op_ioctl()
radeon_object.c
334 if (radeon_ttm_tt_has_userptr(bo->tbo.ttm)) in radeon_bo_pin_restricted()
radeon_cs.c
154 if (radeon_ttm_tt_has_userptr(p->relocs[i].robj->tbo.ttm)) { in radeon_cs_parser_relocs()
radeon.h
2826 extern int radeon_ttm_tt_set_userptr(struct ttm_tt *ttm, uint64_t addr,
2828 extern bool radeon_ttm_tt_has_userptr(struct ttm_tt *ttm);
2829 extern bool radeon_ttm_tt_is_readonly(struct ttm_tt *ttm);
radeon_vm.c
943 if (bo_va->bo && radeon_ttm_tt_is_readonly(bo_va->bo->tbo.ttm)) in radeon_vm_bo_update()
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
vmwgfx_buffer.c
407 vsgt->pages = vmw_tt->dma_ttm.ttm.pages; in vmw_ttm_map_dma()
408 vsgt->num_pages = vmw_tt->dma_ttm.ttm.num_pages; in vmw_ttm_map_dma()
517 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_map_dma()
534 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_unmap_dma()
554 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_sg_table()
560 static int vmw_ttm_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem) in vmw_ttm_bind() argument
563 container_of(ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_ttm_bind()
576 ttm->num_pages, vmw_be->gmr_id); in vmw_ttm_bind()
580 vmw_mob_create(ttm->num_pages); in vmw_ttm_bind()
586 &vmw_be->vsgt, ttm->num_pages, in vmw_ttm_bind()
[all …]
vmwgfx_mob.c
267 ret = vmw_bo_driver.ttm_tt_populate(batch->otable_bo->ttm); in vmw_otable_batch_setup()
446 ret = vmw_bo_driver.ttm_tt_populate(mob->pt_bo->ttm); in vmw_mob_pt_populate()
/linux-4.4.14/drivers/gpu/drm/cirrus/
cirrus_ttm.c
35 return container_of(bd, struct cirrus_device, ttm.bdev); in cirrus_bdev()
55 global_ref = &cirrus->ttm.mem_global_ref; in cirrus_ttm_global_init()
67 cirrus->ttm.bo_global_ref.mem_glob = in cirrus_ttm_global_init()
68 cirrus->ttm.mem_global_ref.object; in cirrus_ttm_global_init()
69 global_ref = &cirrus->ttm.bo_global_ref.ref; in cirrus_ttm_global_init()
77 drm_global_item_unref(&cirrus->ttm.mem_global_ref); in cirrus_ttm_global_init()
86 if (cirrus->ttm.mem_global_ref.release == NULL) in cirrus_ttm_global_release()
89 drm_global_item_unref(&cirrus->ttm.bo_global_ref.ref); in cirrus_ttm_global_release()
90 drm_global_item_unref(&cirrus->ttm.mem_global_ref); in cirrus_ttm_global_release()
91 cirrus->ttm.mem_global_ref.release = NULL; in cirrus_ttm_global_release()
[all …]
cirrus_drv.h
148 } ttm; member
/linux-4.4.14/drivers/gpu/drm/ast/
ast_ttm.c
35 return container_of(bd, struct ast_private, ttm.bdev); in ast_bdev()
55 global_ref = &ast->ttm.mem_global_ref; in ast_ttm_global_init()
67 ast->ttm.bo_global_ref.mem_glob = in ast_ttm_global_init()
68 ast->ttm.mem_global_ref.object; in ast_ttm_global_init()
69 global_ref = &ast->ttm.bo_global_ref.ref; in ast_ttm_global_init()
77 drm_global_item_unref(&ast->ttm.mem_global_ref); in ast_ttm_global_init()
86 if (ast->ttm.mem_global_ref.release == NULL) in ast_ttm_global_release()
89 drm_global_item_unref(&ast->ttm.bo_global_ref.ref); in ast_ttm_global_release()
90 drm_global_item_unref(&ast->ttm.mem_global_ref); in ast_ttm_global_release()
91 ast->ttm.mem_global_ref.release = NULL; in ast_ttm_global_release()
[all …]
ast_drv.h
107 } ttm; member
/linux-4.4.14/drivers/gpu/drm/mgag200/
mgag200_ttm.c
35 return container_of(bd, struct mga_device, ttm.bdev); in mgag200_bdev()
55 global_ref = &ast->ttm.mem_global_ref; in mgag200_ttm_global_init()
67 ast->ttm.bo_global_ref.mem_glob = in mgag200_ttm_global_init()
68 ast->ttm.mem_global_ref.object; in mgag200_ttm_global_init()
69 global_ref = &ast->ttm.bo_global_ref.ref; in mgag200_ttm_global_init()
77 drm_global_item_unref(&ast->ttm.mem_global_ref); in mgag200_ttm_global_init()
86 if (ast->ttm.mem_global_ref.release == NULL) in mgag200_ttm_global_release()
89 drm_global_item_unref(&ast->ttm.bo_global_ref.ref); in mgag200_ttm_global_release()
90 drm_global_item_unref(&ast->ttm.mem_global_ref); in mgag200_ttm_global_release()
91 ast->ttm.mem_global_ref.release = NULL; in mgag200_ttm_global_release()
[all …]
mgag200_drv.h
216 } ttm; member
/linux-4.4.14/drivers/gpu/drm/virtio/
virtgpu_ttm.c
289 struct ttm_dma_tt ttm; member
294 static int virtio_gpu_ttm_backend_bind(struct ttm_tt *ttm, in virtio_gpu_ttm_backend_bind() argument
297 struct virtio_gpu_ttm_tt *gtt = (void *)ttm; in virtio_gpu_ttm_backend_bind()
300 if (!ttm->num_pages) in virtio_gpu_ttm_backend_bind()
302 ttm->num_pages, bo_mem, ttm); in virtio_gpu_ttm_backend_bind()
308 static int virtio_gpu_ttm_backend_unbind(struct ttm_tt *ttm) in virtio_gpu_ttm_backend_unbind() argument
314 static void virtio_gpu_ttm_backend_destroy(struct ttm_tt *ttm) in virtio_gpu_ttm_backend_destroy() argument
316 struct virtio_gpu_ttm_tt *gtt = (void *)ttm; in virtio_gpu_ttm_backend_destroy()
318 ttm_dma_tt_fini(&gtt->ttm); in virtio_gpu_ttm_backend_destroy()
328 static int virtio_gpu_ttm_tt_populate(struct ttm_tt *ttm) in virtio_gpu_ttm_tt_populate() argument
[all …]
virtgpu_object.c
123 struct page **pages = bo->tbo.ttm->pages; in virtio_gpu_object_get_sg_table()
130 if (bo->tbo.ttm->state == tt_unpopulated) in virtio_gpu_object_get_sg_table()
131 bo->tbo.ttm->bdev->driver->ttm_tt_populate(bo->tbo.ttm); in virtio_gpu_object_get_sg_table()
/linux-4.4.14/include/drm/ttm/
ttm_bo_driver.h
57 int (*bind) (struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem);
67 int (*unbind) (struct ttm_tt *ttm);
77 void (*destroy) (struct ttm_tt *ttm);
145 struct ttm_tt ttm; member
345 int (*ttm_tt_populate)(struct ttm_tt *ttm);
354 void (*ttm_tt_unpopulate)(struct ttm_tt *ttm);
579 extern int ttm_tt_init(struct ttm_tt *ttm, struct ttm_bo_device *bdev,
593 extern void ttm_tt_fini(struct ttm_tt *ttm);
604 extern int ttm_tt_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem);
613 extern void ttm_tt_destroy(struct ttm_tt *ttm);
[all …]
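
The bind/unbind/destroy pointers at 57/67/77 belong to struct ttm_backend_func, the per-TT vtable each driver fills in, while the populate/unpopulate hooks at 345/354 sit in struct ttm_bo_driver. A skeletal example of wiring a backend, with hypothetical foo_* names (the shape follows the qxl and virtio hits in this listing):

struct foo_ttm_tt {
	struct ttm_dma_tt ttm;		/* first member, as in qxl/virtio */
};

static int foo_ttm_backend_bind(struct ttm_tt *ttm, struct ttm_mem_reg *bo_mem)
{
	/* point the device's GART/page tables at ttm->pages here */
	return 0;
}

static int foo_ttm_backend_unbind(struct ttm_tt *ttm)
{
	/* tear that mapping down again */
	return 0;
}

static void foo_ttm_backend_destroy(struct ttm_tt *ttm)
{
	struct foo_ttm_tt *gtt = (struct foo_ttm_tt *)ttm;

	ttm_dma_tt_fini(&gtt->ttm);
	kfree(gtt);
}

static struct ttm_backend_func foo_backend_func = {
	.bind = &foo_ttm_backend_bind,
	.unbind = &foo_ttm_backend_unbind,
	.destroy = &foo_ttm_backend_destroy,
};
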
ttm_page_alloc.h
50 extern int ttm_pool_populate(struct ttm_tt *ttm);
59 extern void ttm_pool_unpopulate(struct ttm_tt *ttm);
ttm_bo_api.h
220 struct ttm_tt *ttm; member
/linux-4.4.14/drivers/gpu/drm/qxl/
qxl_ttm.c
258 struct ttm_dma_tt ttm; member
263 static int qxl_ttm_backend_bind(struct ttm_tt *ttm, in qxl_ttm_backend_bind() argument
266 struct qxl_ttm_tt *gtt = (void *)ttm; in qxl_ttm_backend_bind()
269 if (!ttm->num_pages) { in qxl_ttm_backend_bind()
271 ttm->num_pages, bo_mem, ttm); in qxl_ttm_backend_bind()
277 static int qxl_ttm_backend_unbind(struct ttm_tt *ttm) in qxl_ttm_backend_unbind() argument
283 static void qxl_ttm_backend_destroy(struct ttm_tt *ttm) in qxl_ttm_backend_destroy() argument
285 struct qxl_ttm_tt *gtt = (void *)ttm; in qxl_ttm_backend_destroy()
287 ttm_dma_tt_fini(&gtt->ttm); in qxl_ttm_backend_destroy()
297 static int qxl_ttm_tt_populate(struct ttm_tt *ttm) in qxl_ttm_tt_populate() argument
[all …]
/linux-4.4.14/drivers/gpu/drm/bochs/
bochs_mm.c
16 return container_of(bd, struct bochs_device, ttm.bdev); in bochs_bdev()
34 global_ref = &bochs->ttm.mem_global_ref; in bochs_ttm_global_init()
46 bochs->ttm.bo_global_ref.mem_glob = in bochs_ttm_global_init()
47 bochs->ttm.mem_global_ref.object; in bochs_ttm_global_init()
48 global_ref = &bochs->ttm.bo_global_ref.ref; in bochs_ttm_global_init()
56 drm_global_item_unref(&bochs->ttm.mem_global_ref); in bochs_ttm_global_init()
65 if (bochs->ttm.mem_global_ref.release == NULL) in bochs_ttm_global_release()
68 drm_global_item_unref(&bochs->ttm.bo_global_ref.ref); in bochs_ttm_global_release()
69 drm_global_item_unref(&bochs->ttm.mem_global_ref); in bochs_ttm_global_release()
70 bochs->ttm.mem_global_ref.release = NULL; in bochs_ttm_global_release()
[all …]
bochs.h
86 } ttm; member
/linux-4.4.14/arch/powerpc/perf/
ppc970-pmu.c
264 unsigned int ttm, grp; in p970_compute_mmcr() local
321 ttm = unitmap[i]; in p970_compute_mmcr()
322 ++ttmuse[(ttm >> 2) & 1]; in p970_compute_mmcr()
323 mmcr1 |= (unsigned long)(ttm & ~4) << MMCR1_TTM1SEL_SH; in p970_compute_mmcr()
335 ttm = (unitmap[unit] >> 2) & 1; in p970_compute_mmcr()
337 ttm = 2; in p970_compute_mmcr()
339 ttm = 3; in p970_compute_mmcr()
343 mmcr1 |= (unsigned long)ttm in p970_compute_mmcr()
power4-pmu.c
363 unsigned int ttm, grp; in p4_compute_mmcr() local
461 ttm = unit - 1; /* 2->1, 3->2 */ in p4_compute_mmcr()
463 ttm = unit >> 2; in p4_compute_mmcr()
464 mmcr1 |= (unsigned long)ttm in p4_compute_mmcr()
power5-pmu.c
391 unsigned int ttm, grp; in power5_compute_mmcr() local
485 ttm = unit >> 2; in power5_compute_mmcr()
486 mmcr1 |= (unsigned long)ttm in power5_compute_mmcr()
power5+-pmu.c
460 unsigned int ttm; in power5p_compute_mmcr() local
545 ttm = unit >> 2; in power5p_compute_mmcr()
546 mmcr1 |= (unsigned long)ttm in power5p_compute_mmcr()
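
Note that these arch/powerpc/perf matches are an unrelated ttm: a TTM (event-bus mux) select value packed into the MMCR1 event-control register. The recurring idiom is plain shift-and-OR field packing, e.g. (shift constant per PMU, as in ppc970-pmu.c):

	/* place the unit's TTM select field at its bit position in MMCR1 */
	mmcr1 |= (unsigned long)ttm << MMCR1_TTM1SEL_SH;
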
/linux-4.4.14/arch/x86/platform/uv/
tlb_uv.c
518 cycles_t ttm; in uv1_wait_completion() local
535 ttm = get_cycles(); in uv1_wait_completion()
543 if (cycles_2_us(ttm - bcp->send_message) < timeout_us) { in uv1_wait_completion()
614 cycles_t ttm; in uv2_3_wait_completion() local
634 ttm = get_cycles(); in uv2_3_wait_completion()
645 if (cycles_2_us(ttm - bcp->send_message) < timeout_us) { in uv2_3_wait_completion()
660 ttm = get_cycles(); in uv2_3_wait_completion()
661 if ((ttm - bcp->send_message) > bcp->timeout_interval) in uv2_3_wait_completion()
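
The tlb_uv.c ttm is likewise unrelated: a cycles_t timestamp driving a cycle-counter timeout while spinning on BAU broadcast completion. The shape of that check inside the wait loop, as the hits suggest (cycles_2_us() is a helper local to tlb_uv.c):

		cycles_t ttm = get_cycles();

		/* elapsed time since the broadcast was sent, in
		 * microseconds; keep spinning while inside the window */
		if (cycles_2_us(ttm - bcp->send_message) < timeout_us)
			continue;
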
/linux-4.4.14/drivers/gpu/drm/
Makefile
38 obj-$(CONFIG_DRM_TTM) += ttm/
/linux-4.4.14/drivers/net/ethernet/dec/tulip/
de4x5.c
516 u_int ttm; /* Transmit Threshold Mode for each media */ member
4629 lp->phy[lp->active].ttm = get_unaligned_le16(p); in type1_infoblock()
4710 lp->phy[lp->active].ttm = get_unaligned_le16(p); p += 2; in type3_infoblock()