Searched refs:bo (Results 1 – 153 of 153) sorted by relevance

/linux-4.4.14/drivers/gpu/drm/qxl/
qxl_object.c 32 struct qxl_bo *bo; in qxl_ttm_bo_destroy() local
35 bo = container_of(tbo, struct qxl_bo, tbo); in qxl_ttm_bo_destroy()
36 qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_ttm_bo_destroy()
38 qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
40 list_del_init(&bo->list); in qxl_ttm_bo_destroy()
42 drm_gem_object_release(&bo->gem_base); in qxl_ttm_bo_destroy()
43 kfree(bo); in qxl_ttm_bo_destroy()
46 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) in qxl_ttm_bo_is_qxl_bo() argument
48 if (bo->destroy == &qxl_ttm_bo_destroy) in qxl_ttm_bo_is_qxl_bo()
83 struct qxl_bo *bo; in qxl_bo_create() local
[all …]
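
The qxl_ttm_bo_destroy() hits above show the standard TTM teardown shape: TTM calls back with the embedded ttm_buffer_object, and the driver recovers its wrapping structure with container_of() before freeing it. A minimal sketch of the pattern, with hypothetical my_bo/my_bo_destroy names standing in for the qxl ones:

    #include <linux/list.h>
    #include <linux/slab.h>
    #include <drm/ttm/ttm_bo_api.h>

    struct my_bo {
        struct ttm_buffer_object tbo;  /* embedded by value, never a pointer */
        struct list_head list;
    };

    static void my_bo_destroy(struct ttm_buffer_object *tbo)
    {
        /* TTM hands back the embedded member; recover the wrapper. */
        struct my_bo *bo = container_of(tbo, struct my_bo, tbo);

        list_del_init(&bo->list);
        kfree(bo);
    }

Because each driver's destroy callback has a unique address, comparing bo->destroy against it doubles as a cheap type check, which is exactly what qxl_ttm_bo_is_qxl_bo() does above.
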
qxl_object.h 30 static inline int qxl_bo_reserve(struct qxl_bo *bo, bool no_wait) in qxl_bo_reserve() argument
34 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL); in qxl_bo_reserve()
37 struct qxl_device *qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_bo_reserve()
38 dev_err(qdev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
45 static inline void qxl_bo_unreserve(struct qxl_bo *bo) in qxl_bo_unreserve() argument
47 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
50 static inline u64 qxl_bo_gpu_offset(struct qxl_bo *bo) in qxl_bo_gpu_offset() argument
52 return bo->tbo.offset; in qxl_bo_gpu_offset()
55 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) in qxl_bo_size() argument
57 return bo->tbo.num_pages << PAGE_SHIFT; in qxl_bo_size()
[all …]
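
Every helper in qxl_object.h assumes the caller holds the BO's reservation: qxl_bo_reserve()/qxl_bo_unreserve() are thin wrappers over ttm_bo_reserve()/ttm_bo_unreserve() that bracket any pin, kmap, or placement work. A hedged sketch of the calling convention (my_bo_with_reservation is illustrative, not a qxl function):

    static int my_bo_with_reservation(struct qxl_bo *bo)
    {
        int r = qxl_bo_reserve(bo, false); /* false: sleep until available */
        if (r)
            return r;                      /* e.g. -ERESTARTSYS or -EBUSY */

        /* reservation held: pin, kmap, and placement changes are safe here */

        qxl_bo_unreserve(bo);
        return 0;
    }
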
qxl_release.c 166 struct qxl_bo *bo; in qxl_release_free_list() local
170 bo = to_qxl_bo(entry->tv.bo); in qxl_release_free_list()
171 qxl_bo_unref(&bo); in qxl_release_free_list()
204 struct qxl_bo **bo) in qxl_release_bo_alloc() argument
210 bo); in qxl_release_bo_alloc()
214 int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo) in qxl_release_list_add() argument
219 if (entry->tv.bo == &bo->tbo) in qxl_release_list_add()
227 qxl_bo_ref(bo); in qxl_release_list_add()
228 entry->tv.bo = &bo->tbo; in qxl_release_list_add()
234 static int qxl_release_validate_bo(struct qxl_bo *bo) in qxl_release_validate_bo() argument
[all …]
qxl_ttm.c 111 struct ttm_buffer_object *bo; in qxl_ttm_fault() local
114 bo = (struct ttm_buffer_object *)vma->vm_private_data; in qxl_ttm_fault()
115 if (bo == NULL) in qxl_ttm_fault()
187 static void qxl_evict_flags(struct ttm_buffer_object *bo, in qxl_evict_flags() argument
197 if (!qxl_ttm_bo_is_qxl_bo(bo)) { in qxl_evict_flags()
204 qbo = container_of(bo, struct qxl_bo, tbo); in qxl_evict_flags()
209 static int qxl_verify_access(struct ttm_buffer_object *bo, struct file *filp) in qxl_verify_access() argument
211 struct qxl_bo *qbo = to_qxl_bo(bo); in qxl_verify_access()
337 static void qxl_move_null(struct ttm_buffer_object *bo, in qxl_move_null() argument
340 struct ttm_mem_reg *old_mem = &bo->mem; in qxl_move_null()
[all …]
qxl_display.c 228 struct qxl_bo *bo = gem_to_qxl_bo(qfb_src->obj); in qxl_crtc_page_flip() local
242 bo->is_primary = true; in qxl_crtc_page_flip()
244 ret = qxl_bo_reserve(bo, false); in qxl_crtc_page_flip()
247 ret = qxl_bo_pin(bo, bo->type, NULL); in qxl_crtc_page_flip()
248 qxl_bo_unreserve(bo); in qxl_crtc_page_flip()
252 qxl_draw_dirty_fb(qdev, qfb_src, bo, 0, 0, in qxl_crtc_page_flip()
264 ret = qxl_bo_reserve(bo, false); in qxl_crtc_page_flip()
266 qxl_bo_unpin(bo); in qxl_crtc_page_flip()
267 qxl_bo_unreserve(bo); in qxl_crtc_page_flip()
612 struct qxl_bo *bo, *old_bo = NULL; in qxl_crtc_mode_set() local
[all …]
qxl_debugfs.c 58 struct qxl_bo *bo; in qxl_debugfs_buffers_info() local
60 list_for_each_entry(bo, &qdev->gem.objects, list) { in qxl_debugfs_buffers_info()
65 fobj = rcu_dereference(bo->tbo.resv->fence); in qxl_debugfs_buffers_info()
70 (unsigned long)bo->gem_base.size, in qxl_debugfs_buffers_info()
71 bo->pin_count, rel); in qxl_debugfs_buffers_info()
qxl_image.c 45 ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo); in qxl_allocate_chunk()
70 ret = qxl_alloc_bo_reserved(qdev, release, sizeof(struct qxl_image), &image->bo); in qxl_image_alloc_objects()
78 qxl_bo_unref(&image->bo); in qxl_image_alloc_objects()
91 qxl_bo_unref(&chunk->bo); in qxl_image_free_objects()
95 qxl_bo_unref(&dimage->bo); in qxl_image_free_objects()
122 chunk_bo = drv_chunk->bo; in qxl_image_init_helper()
189 image_bo = dimage->bo; in qxl_image_init_helper()
qxl_cmd.c 185 cmd.data = qxl_bo_physical_address(qdev, to_qxl_bo(entry->tv.bo), release->release_offset); in qxl_push_command_ring_release()
198 cmd.data = qxl_bo_physical_address(qdev, to_qxl_bo(entry->tv.bo), release->release_offset); in qxl_push_cursor_ring_release()
261 struct qxl_bo *bo; in qxl_alloc_bo_reserved() local
265 false, QXL_GEM_DOMAIN_VRAM, NULL, &bo); in qxl_alloc_bo_reserved()
270 ret = qxl_release_list_add(release, bo); in qxl_alloc_bo_reserved()
274 *_bo = bo; in qxl_alloc_bo_reserved()
277 qxl_bo_unref(&bo); in qxl_alloc_bo_reserved()
380 unsigned offset, struct qxl_bo *bo) in qxl_io_create_primary() argument
387 create->format = bo->surf.format; in qxl_io_create_primary()
388 create->width = bo->surf.width; in qxl_io_create_primary()
[all …]
qxl_drv.h 204 struct qxl_bo *bo; member
208 struct qxl_bo *bo; member
371 qxl_bo_physical_address(struct qxl_device *qdev, struct qxl_bo *bo, in qxl_bo_physical_address() argument
374 int slot_id = bo->type == QXL_GEM_DOMAIN_VRAM ? qdev->main_mem_slot : qdev->surfaces_mem_slot; in qxl_bo_physical_address()
378 return slot->high_bits | (bo->tbo.offset + offset); in qxl_bo_physical_address()
426 int qxl_bo_kmap(struct qxl_bo *bo, void **ptr);
463 struct qxl_bo *bo);
482 int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo);
512 struct qxl_bo *bo,
573 int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo);
qxl_draw.c 221 ptr = qxl_bo_kmap_atomic_page(qdev, dimage->bo, 0); in qxl_draw_opaque_fb()
225 qxl_bo_kunmap_atomic_page(qdev, dimage->bo, ptr); in qxl_draw_opaque_fb()
243 qxl_bo_physical_address(qdev, dimage->bo, 0); in qxl_draw_opaque_fb()
268 struct qxl_bo *bo, in qxl_draw_dirty_fb() argument
341 ret = qxl_bo_kmap(bo, (void **)&surface_base); in qxl_draw_dirty_fb()
348 qxl_bo_kunmap(bo); in qxl_draw_dirty_fb()
374 drawable->u.copy.src_bitmap = qxl_bo_physical_address(qdev, dimage->bo, 0); in qxl_draw_dirty_fb()
/linux-4.4.14/drivers/gpu/drm/radeon/
radeon_object.c 43 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
50 static void radeon_update_memory_usage(struct radeon_bo *bo, in radeon_update_memory_usage() argument
53 struct radeon_device *rdev = bo->rdev; in radeon_update_memory_usage()
54 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT; in radeon_update_memory_usage()
74 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
76 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
78 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1); in radeon_ttm_bo_destroy()
80 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
81 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
82 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
[all …]
radeon_prime.c 34 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_get_sg_table() local
35 int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
42 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vmap() local
45 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
46 &bo->dma_buf_vmap); in radeon_gem_prime_vmap()
50 return bo->dma_buf_vmap.virtual; in radeon_gem_prime_vmap()
55 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vunmap() local
57 ttm_bo_kunmap(&bo->dma_buf_vmap); in radeon_gem_prime_vunmap()
66 struct radeon_bo *bo; in radeon_gem_prime_import_sg_table() local
[all …]
radeon_object.h 64 static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr) in radeon_bo_reserve() argument
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, NULL); in radeon_bo_reserve()
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo); in radeon_bo_reserve()
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) in radeon_bo_unreserve() argument
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
91 static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo) in radeon_bo_gpu_offset() argument
93 return bo->tbo.offset; in radeon_bo_gpu_offset()
96 static inline unsigned long radeon_bo_size(struct radeon_bo *bo) in radeon_bo_size() argument
98 return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
101 static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo) in radeon_bo_ngpu_pages() argument
[all …]
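
The size helpers above are pure arithmetic: TTM accounts for BOs in CPU pages, so the byte size is num_pages << PAGE_SHIFT. A worked example, assuming the usual 4 KiB pages (PAGE_SHIFT == 12):

    /* a 16-page BO: 16 << 12 == 65536 bytes == 64 KiB */
    unsigned long bytes = 16UL << PAGE_SHIFT;
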
radeon_mn.c 73 struct radeon_bo *bo, *next_bo; in radeon_mn_destroy() local
82 list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) { in radeon_mn_destroy()
83 bo->mn = NULL; in radeon_mn_destroy()
84 list_del_init(&bo->mn_list); in radeon_mn_destroy()
137 struct radeon_bo *bo; in radeon_mn_invalidate_range_start() local
143 list_for_each_entry(bo, &node->bos, mn_list) { in radeon_mn_invalidate_range_start()
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
148 r = radeon_bo_reserve(bo, true); in radeon_mn_invalidate_range_start()
154 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in radeon_mn_invalidate_range_start()
159 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU); in radeon_mn_invalidate_range_start()
[all …]
radeon_vm.c 144 list[0].tv.bo = &vm->page_directory->tbo; in radeon_vm_get_bos()
150 if (!vm->page_tables[i].bo) in radeon_vm_get_bos()
153 list[idx].robj = vm->page_tables[i].bo; in radeon_vm_get_bos()
156 list[idx].tv.bo = &list[idx].robj->tbo; in radeon_vm_get_bos()
294 struct radeon_bo *bo) in radeon_vm_bo_find() argument
298 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
321 struct radeon_bo *bo) in radeon_vm_bo_add() argument
330 bo_va->bo = bo; in radeon_vm_bo_add()
339 list_add_tail(&bo_va->bo_list, &bo->va); in radeon_vm_bo_add()
388 struct radeon_bo *bo) in radeon_vm_clear_bo() argument
[all …]
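
radeon_vm_bo_find() above answers "is this BO already mapped into this VM?" by walking the BO's own list of mappings; radeon_vm_bo_add() shows the other half, linking a new bo_va onto &bo->va. A sketch of the lookup under those assumptions (the bo_va->vm field is inferred from the pattern, not shown in the hits):

    static struct radeon_bo_va *my_vm_bo_find(struct radeon_vm *vm,
                                              struct radeon_bo *bo)
    {
        struct radeon_bo_va *bo_va;

        /* each BO keeps the list of its per-VM mappings on bo->va */
        list_for_each_entry(bo_va, &bo->va, bo_list) {
            if (bo_va->vm == vm)   /* already mapped into this VM */
                return bo_va;
        }
        return NULL;               /* no mapping in this VM yet */
    }
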
radeon_ttm.c 178 static void radeon_evict_flags(struct ttm_buffer_object *bo, in radeon_evict_flags() argument
189 if (!radeon_ttm_bo_is_radeon_bo(bo)) { in radeon_evict_flags()
196 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_evict_flags()
197 switch (bo->mem.mem_type) { in radeon_evict_flags()
202 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) { in radeon_evict_flags()
234 static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp) in radeon_verify_access() argument
236 struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo); in radeon_verify_access()
238 if (radeon_ttm_tt_has_userptr(bo->ttm)) in radeon_verify_access()
243 static void radeon_move_null(struct ttm_buffer_object *bo, in radeon_move_null() argument
246 struct ttm_mem_reg *old_mem = &bo->mem; in radeon_move_null()
[all …]
radeon_sa.c 57 sa_manager->bo = NULL; in radeon_sa_bo_manager_init()
68 domain, flags, NULL, NULL, &sa_manager->bo); in radeon_sa_bo_manager_init()
92 radeon_bo_unref(&sa_manager->bo); in radeon_sa_bo_manager_fini()
101 if (sa_manager->bo == NULL) { in radeon_sa_bo_manager_start()
107 r = radeon_bo_reserve(sa_manager->bo, false); in radeon_sa_bo_manager_start()
112 r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in radeon_sa_bo_manager_start()
114 radeon_bo_unreserve(sa_manager->bo); in radeon_sa_bo_manager_start()
118 r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr); in radeon_sa_bo_manager_start()
119 radeon_bo_unreserve(sa_manager->bo); in radeon_sa_bo_manager_start()
128 if (sa_manager->bo == NULL) { in radeon_sa_bo_manager_suspend()
[all …]
radeon_trace.h 15 TP_PROTO(struct radeon_bo *bo),
16 TP_ARGS(bo),
18 __field(struct radeon_bo *, bo)
23 __entry->bo = bo;
24 __entry->pages = bo->tbo.num_pages;
26 TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
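
The radeon_trace.h fragments above all belong to a single TRACE_EVENT definition; reassembled around the standard TP_STRUCT__entry/TP_fast_assign scaffolding, the skeleton plausibly reads:

    TRACE_EVENT(radeon_bo_create,
        TP_PROTO(struct radeon_bo *bo),
        TP_ARGS(bo),
        TP_STRUCT__entry(
            __field(struct radeon_bo *, bo)
            __field(u32, pages)
        ),
        TP_fast_assign(
            __entry->bo = bo;
            __entry->pages = bo->tbo.num_pages;
        ),
        TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
    );

(The event name radeon_bo_create is an inference; the hits do not show it.)
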
radeon_kfd.c 45 struct radeon_bo *bo; member
247 RADEON_GEM_GTT_WC, NULL, NULL, &(*mem)->bo); in alloc_gtt_mem()
255 r = radeon_bo_reserve((*mem)->bo, true); in alloc_gtt_mem()
261 r = radeon_bo_pin((*mem)->bo, RADEON_GEM_DOMAIN_GTT, in alloc_gtt_mem()
269 r = radeon_bo_kmap((*mem)->bo, &(*mem)->cpu_ptr); in alloc_gtt_mem()
277 radeon_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
282 radeon_bo_unpin((*mem)->bo); in alloc_gtt_mem()
284 radeon_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
286 radeon_bo_unref(&(*mem)->bo); in alloc_gtt_mem()
297 radeon_bo_reserve(mem->bo, true); in free_gtt_mem()
[all …]
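
alloc_gtt_mem() above strings together the canonical bring-up sequence (create, reserve, pin into GTT, kmap) and unwinds it in reverse on failure. A compressed sketch of the ordering; the gpu_addr field name is an assumption and the error labels are abbreviated:

    r = radeon_bo_reserve((*mem)->bo, true);
    if (r)
        goto err_unref;
    r = radeon_bo_pin((*mem)->bo, RADEON_GEM_DOMAIN_GTT, &(*mem)->gpu_addr);
    if (r)
        goto err_unreserve;
    r = radeon_bo_kmap((*mem)->bo, &(*mem)->cpu_ptr);
    if (r)
        goto err_unpin;
    radeon_bo_unreserve((*mem)->bo);
    return 0;

    err_unpin:
        radeon_bo_unpin((*mem)->bo);
    err_unreserve:
        radeon_bo_unreserve((*mem)->bo);
    err_unref:
        radeon_bo_unref(&(*mem)->bo);
        return r;
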
radeon_gem.c 287 struct radeon_bo *bo; in radeon_gem_userptr_ioctl() local
322 bo = gem_to_radeon_bo(gobj); in radeon_gem_userptr_ioctl()
323 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
328 r = radeon_mn_register(bo, args->addr); in radeon_gem_userptr_ioctl()
335 r = radeon_bo_reserve(bo, true); in radeon_gem_userptr_ioctl()
341 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_GTT); in radeon_gem_userptr_ioctl()
342 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in radeon_gem_userptr_ioctl()
343 radeon_bo_unreserve(bo); in radeon_gem_userptr_ioctl()
551 tv.bo = &bo_va->bo->tbo; in radeon_gem_va_update_vm()
564 domain = radeon_mem_type_to_domain(entry->bo->mem.mem_type); in radeon_gem_va_update_vm()
[all …]
radeon_cs.c 167 p->relocs[i].tv.bo = &p->relocs[i].robj->tbo; in radeon_cs_parser_relocs()
423 struct radeon_bo *bo = parser->relocs[i].robj; in radeon_cs_parser_fini() local
424 if (bo == NULL) in radeon_cs_parser_fini()
427 drm_gem_object_unreference_unlocked(&bo->gem_base); in radeon_cs_parser_fini()
499 &rdev->ring_tmp_bo.bo->tbo.mem); in radeon_bo_vm_update_pte()
504 struct radeon_bo *bo; in radeon_bo_vm_update_pte() local
506 bo = p->relocs[i].robj; in radeon_bo_vm_update_pte()
507 bo_va = radeon_vm_bo_find(vm, bo); in radeon_bo_vm_update_pte()
509 dev_err(rdev->dev, "bo %p not in vm %p\n", bo, vm); in radeon_bo_vm_update_pte()
513 r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem); in radeon_bo_vm_update_pte()
radeon_uvd.c 421 static int radeon_uvd_cs_msg(struct radeon_cs_parser *p, struct radeon_bo *bo, in radeon_uvd_cs_msg() argument
436 f = reservation_object_get_excl(bo->tbo.resv); in radeon_uvd_cs_msg()
445 r = radeon_bo_kmap(bo, &ptr); in radeon_uvd_cs_msg()
467 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
493 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
515 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
radeon_kms.c 656 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); in radeon_driver_open_kms()
666 rdev->ring_tmp_bo.bo); in radeon_driver_open_kms()
705 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); in radeon_driver_postclose_kms()
709 radeon_bo_unreserve(rdev->ring_tmp_bo.bo); in radeon_driver_postclose_kms()
radeon.h 436 struct radeon_bo *bo; member
480 struct radeon_bo *bo; member
541 struct radeon_bo *bo; member
917 struct radeon_bo *bo; member
1790 int radeon_mn_register(struct radeon_bo *bo, unsigned long addr);
1791 void radeon_mn_unregister(struct radeon_bo *bo);
1793 static inline int radeon_mn_register(struct radeon_bo *bo, unsigned long addr) in radeon_mn_register() argument
1797 static inline void radeon_mn_unregister(struct radeon_bo *bo) {} in radeon_mn_unregister() argument
2825 extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
2868 struct radeon_bo *bo);
[all …]
radeon_pm.c 146 struct radeon_bo *bo, *n; in radeon_unmap_vram_bos() local
151 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_unmap_vram_bos()
152 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in radeon_unmap_vram_bos()
153 ttm_bo_unmap_virtual(&bo->tbo); in radeon_unmap_vram_bos()
radeon_device.c 219 if (rdev->surface_regs[i].bo) in radeon_surface_init()
220 radeon_bo_get_surface_reg(rdev->surface_regs[i].bo); in radeon_surface_init()
/linux-4.4.14/drivers/gpu/drm/tegra/
gem.c 23 static inline struct tegra_bo *host1x_to_tegra_bo(struct host1x_bo *bo) in host1x_to_tegra_bo() argument
25 return container_of(bo, struct tegra_bo, base); in host1x_to_tegra_bo()
28 static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put() argument
30 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_put()
38 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) in tegra_bo_pin() argument
40 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_pin()
45 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) in tegra_bo_unpin() argument
49 static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap() argument
51 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_mmap()
56 static void tegra_bo_munmap(struct host1x_bo *bo, void *addr) in tegra_bo_munmap() argument
[all …]
fb.c 66 struct tegra_bo *bo = fb->planes[i]; in tegra_fb_destroy() local
68 if (bo) { in tegra_fb_destroy()
69 if (bo->pages && bo->vaddr) in tegra_fb_destroy()
70 vunmap(bo->vaddr); in tegra_fb_destroy()
72 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fb_destroy()
208 struct tegra_bo *bo; in tegra_fbdev_probe() local
223 bo = tegra_bo_create(drm, size, 0); in tegra_fbdev_probe()
224 if (IS_ERR(bo)) in tegra_fbdev_probe()
225 return PTR_ERR(bo); in tegra_fbdev_probe()
230 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fbdev_probe()
[all …]
drm.c 274 struct tegra_bo *bo; in host1x_bo_lookup() local
284 bo = to_tegra_bo(gem); in host1x_bo_lookup()
285 return &bo->base; in host1x_bo_lookup()
316 dest->cmdbuf.bo = host1x_bo_lookup(drm, file, cmdbuf); in host1x_reloc_copy_from_user()
317 if (!dest->cmdbuf.bo) in host1x_reloc_copy_from_user()
320 dest->target.bo = host1x_bo_lookup(drm, file, target); in host1x_reloc_copy_from_user()
321 if (!dest->target.bo) in host1x_reloc_copy_from_user()
361 struct host1x_bo *bo; in tegra_drm_submit() local
368 bo = host1x_bo_lookup(drm, file, cmdbuf.handle); in tegra_drm_submit()
369 if (!bo) { in tegra_drm_submit()
[all …]
dc.c 590 struct tegra_bo *bo = tegra_fb_get_plane(fb, i); in tegra_plane_atomic_update() local
592 window.base[i] = bo->paddr + fb->offsets[i]; in tegra_plane_atomic_update()
711 struct tegra_bo *bo = tegra_fb_get_plane(plane->state->fb, 0); in tegra_cursor_atomic_update() local
743 value |= (bo->paddr >> 10) & 0x3fffff; in tegra_cursor_atomic_update()
747 value = (bo->paddr >> 32) & 0x3; in tegra_cursor_atomic_update()
958 struct tegra_bo *bo; in tegra_dc_finish_page_flip() local
967 bo = tegra_fb_get_plane(crtc->primary->fb, 0); in tegra_dc_finish_page_flip()
979 if (base == bo->paddr + crtc->primary->fb->offsets[0]) { in tegra_dc_finish_page_flip()
/linux-4.4.14/drivers/gpu/drm/amd/amdgpu/
amdgpu_object.c 94 struct amdgpu_bo *bo; in amdgpu_ttm_bo_destroy() local
96 bo = container_of(tbo, struct amdgpu_bo, tbo); in amdgpu_ttm_bo_destroy()
98 amdgpu_update_memory_usage(bo->adev, &bo->tbo.mem, NULL); in amdgpu_ttm_bo_destroy()
100 mutex_lock(&bo->adev->gem.mutex); in amdgpu_ttm_bo_destroy()
101 list_del_init(&bo->list); in amdgpu_ttm_bo_destroy()
102 mutex_unlock(&bo->adev->gem.mutex); in amdgpu_ttm_bo_destroy()
103 drm_gem_object_release(&bo->gem_base); in amdgpu_ttm_bo_destroy()
104 amdgpu_bo_unref(&bo->parent); in amdgpu_ttm_bo_destroy()
105 kfree(bo->metadata); in amdgpu_ttm_bo_destroy()
106 kfree(bo); in amdgpu_ttm_bo_destroy()
[all …]
amdgpu_prime.c 34 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_get_sg_table() local
35 int npages = bo->tbo.num_pages; in amdgpu_gem_prime_get_sg_table()
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in amdgpu_gem_prime_get_sg_table()
42 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_vmap() local
45 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in amdgpu_gem_prime_vmap()
46 &bo->dma_buf_vmap); in amdgpu_gem_prime_vmap()
50 return bo->dma_buf_vmap.virtual; in amdgpu_gem_prime_vmap()
55 struct amdgpu_bo *bo = gem_to_amdgpu_bo(obj); in amdgpu_gem_prime_vunmap() local
57 ttm_bo_kunmap(&bo->dma_buf_vmap); in amdgpu_gem_prime_vunmap()
66 struct amdgpu_bo *bo; in amdgpu_gem_prime_import_sg_table() local
[all …]
amdgpu_object.h 70 static inline int amdgpu_bo_reserve(struct amdgpu_bo *bo, bool no_intr) in amdgpu_bo_reserve() argument
74 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, 0); in amdgpu_bo_reserve()
77 dev_err(bo->adev->dev, "%p reserve failed\n", bo); in amdgpu_bo_reserve()
83 static inline void amdgpu_bo_unreserve(struct amdgpu_bo *bo) in amdgpu_bo_unreserve() argument
85 ttm_bo_unreserve(&bo->tbo); in amdgpu_bo_unreserve()
97 static inline u64 amdgpu_bo_gpu_offset(struct amdgpu_bo *bo) in amdgpu_bo_gpu_offset() argument
99 return bo->tbo.offset; in amdgpu_bo_gpu_offset()
102 static inline unsigned long amdgpu_bo_size(struct amdgpu_bo *bo) in amdgpu_bo_size() argument
104 return bo->tbo.num_pages << PAGE_SHIFT; in amdgpu_bo_size()
107 static inline unsigned amdgpu_bo_ngpu_pages(struct amdgpu_bo *bo) in amdgpu_bo_ngpu_pages() argument
[all …]
amdgpu_mn.c 73 struct amdgpu_bo *bo, *next_bo; in amdgpu_mn_destroy() local
82 list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) { in amdgpu_mn_destroy()
83 bo->mn = NULL; in amdgpu_mn_destroy()
84 list_del_init(&bo->mn_list); in amdgpu_mn_destroy()
137 struct amdgpu_bo *bo; in amdgpu_mn_invalidate_range_start() local
143 list_for_each_entry(bo, &node->bos, mn_list) { in amdgpu_mn_invalidate_range_start()
145 if (!amdgpu_ttm_tt_affect_userptr(bo->tbo.ttm, start, in amdgpu_mn_invalidate_range_start()
149 r = amdgpu_bo_reserve(bo, true); in amdgpu_mn_invalidate_range_start()
155 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in amdgpu_mn_invalidate_range_start()
160 amdgpu_ttm_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU); in amdgpu_mn_invalidate_range_start()
[all …]
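
The invalidate path above implements the userptr contract: when the CPU side of a registered range changes, each affected BO is reserved, its pending fences are waited out, and it is validated into the CPU domain so the GPU stops referencing stale pages. A sketch of the per-BO loop body, assembled from the hits:

    r = amdgpu_bo_reserve(bo, true);
    if (r)
        continue;   /* skip BOs we cannot lock */

    /* wait for all readers and writers before pulling the pages */
    r = reservation_object_wait_timeout_rcu(bo->tbo.resv, true, false,
                                            MAX_SCHEDULE_TIMEOUT);

    /* evict to system memory; the GPU mapping is now gone */
    amdgpu_ttm_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_CPU);
    r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false);
    amdgpu_bo_unreserve(bo);
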
amdgpu_uvd.c 338 struct amdgpu_bo *bo; in amdgpu_uvd_cs_pass1() local
347 mapping = amdgpu_cs_find_mapping(ctx->parser, addr, &bo); in amdgpu_uvd_cs_pass1()
359 amdgpu_ttm_placement_from_domain(bo, domain); in amdgpu_uvd_cs_pass1()
361 amdgpu_uvd_force_into_uvd_segment(bo); in amdgpu_uvd_cs_pass1()
363 r = ttm_bo_validate(&bo->tbo, &bo->placement, false, false); in amdgpu_uvd_cs_pass1()
520 struct amdgpu_bo *bo, unsigned offset) in amdgpu_uvd_cs_msg() argument
533 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, true, false, in amdgpu_uvd_cs_msg()
540 r = amdgpu_bo_kmap(bo, &ptr); in amdgpu_uvd_cs_msg()
559 amdgpu_bo_kunmap(bo); in amdgpu_uvd_cs_msg()
580 amdgpu_bo_kunmap(bo); in amdgpu_uvd_cs_msg()
[all …]
amdgpu_trace.h 15 TP_PROTO(struct amdgpu_bo *bo),
16 TP_ARGS(bo),
18 __field(struct amdgpu_bo *, bo)
23 __entry->bo = bo;
24 __entry->pages = bo->tbo.num_pages;
26 TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
122 __field(struct amdgpu_bo *, bo)
130 __entry->bo = bo_va->bo;
137 __entry->bo, __entry->start, __entry->last,
146 __field(struct amdgpu_bo *, bo)
[all …]
amdgpu_amdkfd.c 186 AMDGPU_GEM_CREATE_CPU_GTT_USWC, NULL, NULL, &(*mem)->bo); in alloc_gtt_mem()
194 r = amdgpu_bo_reserve((*mem)->bo, true); in alloc_gtt_mem()
200 r = amdgpu_bo_pin((*mem)->bo, AMDGPU_GEM_DOMAIN_GTT, in alloc_gtt_mem()
208 r = amdgpu_bo_kmap((*mem)->bo, &(*mem)->cpu_ptr); in alloc_gtt_mem()
216 amdgpu_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
221 amdgpu_bo_unpin((*mem)->bo); in alloc_gtt_mem()
223 amdgpu_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
225 amdgpu_bo_unref(&(*mem)->bo); in alloc_gtt_mem()
236 amdgpu_bo_reserve(mem->bo, true); in free_gtt_mem()
237 amdgpu_bo_kunmap(mem->bo); in free_gtt_mem()
[all …]
amdgpu_ttm.c 174 static void amdgpu_evict_flags(struct ttm_buffer_object *bo, in amdgpu_evict_flags() argument
184 if (!amdgpu_ttm_bo_is_amdgpu_bo(bo)) { in amdgpu_evict_flags()
191 rbo = container_of(bo, struct amdgpu_bo, tbo); in amdgpu_evict_flags()
192 switch (bo->mem.mem_type) { in amdgpu_evict_flags()
206 static int amdgpu_verify_access(struct ttm_buffer_object *bo, struct file *filp) in amdgpu_verify_access() argument
208 struct amdgpu_bo *rbo = container_of(bo, struct amdgpu_bo, tbo); in amdgpu_verify_access()
213 static void amdgpu_move_null(struct ttm_buffer_object *bo, in amdgpu_move_null() argument
216 struct ttm_mem_reg *old_mem = &bo->mem; in amdgpu_move_null()
223 static int amdgpu_move_blit(struct ttm_buffer_object *bo, in amdgpu_move_blit() argument
234 adev = amdgpu_get_adev(bo->bdev); in amdgpu_move_blit()
[all …]
amdgpu_ucode.c 241 struct amdgpu_bo **bo = &adev->firmware.fw_buf; in amdgpu_ucode_init_bo() local
250 AMDGPU_GEM_DOMAIN_GTT, 0, NULL, NULL, bo); in amdgpu_ucode_init_bo()
257 err = amdgpu_bo_reserve(*bo, false); in amdgpu_ucode_init_bo()
259 amdgpu_bo_unref(bo); in amdgpu_ucode_init_bo()
264 err = amdgpu_bo_pin(*bo, AMDGPU_GEM_DOMAIN_GTT, &fw_mc_addr); in amdgpu_ucode_init_bo()
266 amdgpu_bo_unreserve(*bo); in amdgpu_ucode_init_bo()
267 amdgpu_bo_unref(bo); in amdgpu_ucode_init_bo()
272 err = amdgpu_bo_kmap(*bo, &fw_buf_ptr); in amdgpu_ucode_init_bo()
275 amdgpu_bo_unpin(*bo); in amdgpu_ucode_init_bo()
276 amdgpu_bo_unreserve(*bo); in amdgpu_ucode_init_bo()
[all …]
amdgpu_sa.c 57 sa_manager->bo = NULL; in amdgpu_sa_bo_manager_init()
68 0, NULL, NULL, &sa_manager->bo); in amdgpu_sa_bo_manager_init()
92 amdgpu_bo_unref(&sa_manager->bo); in amdgpu_sa_bo_manager_fini()
101 if (sa_manager->bo == NULL) { in amdgpu_sa_bo_manager_start()
107 r = amdgpu_bo_reserve(sa_manager->bo, false); in amdgpu_sa_bo_manager_start()
112 r = amdgpu_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in amdgpu_sa_bo_manager_start()
114 amdgpu_bo_unreserve(sa_manager->bo); in amdgpu_sa_bo_manager_start()
118 r = amdgpu_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr); in amdgpu_sa_bo_manager_start()
119 amdgpu_bo_unreserve(sa_manager->bo); in amdgpu_sa_bo_manager_start()
128 if (sa_manager->bo == NULL) { in amdgpu_sa_bo_manager_suspend()
[all …]
amdgpu_cs.c 142 p->uf.bo = amdgpu_bo_ref(gem_to_amdgpu_bo(gobj)); in amdgpu_cs_user_fence_chunk()
145 if (amdgpu_ttm_tt_has_userptr(p->uf.bo->tbo.ttm)) { in amdgpu_cs_user_fence_chunk()
150 p->uf_entry.robj = amdgpu_bo_ref(p->uf.bo); in amdgpu_cs_user_fence_chunk()
154 p->uf_entry.tv.bo = &p->uf_entry.robj->tbo; in amdgpu_cs_user_fence_chunk()
344 struct amdgpu_bo *bo; in amdgpu_cs_list_validate() local
350 bo = lobj->robj; in amdgpu_cs_list_validate()
351 if (!bo->pin_count) { in amdgpu_cs_list_validate()
354 amdgpu_mem_type_to_domain(bo->tbo.mem.mem_type); in amdgpu_cs_list_validate()
372 amdgpu_ttm_placement_from_domain(bo, domain); in amdgpu_cs_list_validate()
374 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in amdgpu_cs_list_validate()
[all …]
amdgpu_vm.c 104 list[0].tv.bo = &vm->page_directory->tbo; in amdgpu_vm_get_bos()
109 if (!vm->page_tables[i].bo) in amdgpu_vm_get_bos()
112 list[idx].robj = vm->page_tables[i].bo; in amdgpu_vm_get_bos()
116 list[idx].tv.bo = &list[idx].robj->tbo; in amdgpu_vm_get_bos()
272 struct amdgpu_bo *bo) in amdgpu_vm_bo_find() argument
276 list_for_each_entry(bo_va, &bo->va, bo_list) { in amdgpu_vm_bo_find()
339 struct amdgpu_bo *bo) in amdgpu_vm_clear_bo() argument
348 r = reservation_object_reserve_shared(bo->tbo.resv); in amdgpu_vm_clear_bo()
352 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in amdgpu_vm_clear_bo()
356 addr = amdgpu_bo_gpu_offset(bo); in amdgpu_vm_clear_bo()
[all …]
amdgpu_gem.c 225 struct amdgpu_bo *bo; in amdgpu_gem_userptr_ioctl() local
254 bo = gem_to_amdgpu_bo(gobj); in amdgpu_gem_userptr_ioctl()
255 r = amdgpu_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in amdgpu_gem_userptr_ioctl()
260 r = amdgpu_mn_register(bo, args->addr); in amdgpu_gem_userptr_ioctl()
267 r = amdgpu_bo_reserve(bo, true); in amdgpu_gem_userptr_ioctl()
273 amdgpu_ttm_placement_from_domain(bo, AMDGPU_GEM_DOMAIN_GTT); in amdgpu_gem_userptr_ioctl()
274 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in amdgpu_gem_userptr_ioctl()
275 amdgpu_bo_unreserve(bo); in amdgpu_gem_userptr_ioctl()
460 tv.bo = &bo_va->bo->tbo; in amdgpu_gem_va_update_vm()
474 domain = amdgpu_mem_type_to_domain(entry->bo->mem.mem_type); in amdgpu_gem_va_update_vm()
[all …]
amdgpu_cgs.c 83 struct amdgpu_bo *bo; in amdgpu_cgs_gmap_kmem() local
89 AMDGPU_GEM_DOMAIN_GTT, 0, sg, NULL, &bo); in amdgpu_cgs_gmap_kmem()
92 ret = amdgpu_bo_reserve(bo, false); in amdgpu_cgs_gmap_kmem()
97 ret = amdgpu_bo_pin_restricted(bo, AMDGPU_GEM_DOMAIN_GTT, in amdgpu_cgs_gmap_kmem()
99 amdgpu_bo_unreserve(bo); in amdgpu_cgs_gmap_kmem()
101 *kmem_handle = (cgs_handle_t)bo; in amdgpu_cgs_gmap_kmem()
amdgpu_amdkfd.h 34 struct amdgpu_bo *bo; member
amdgpu.h 426 struct amdgpu_bo *bo; member
514 struct amdgpu_bo *bo; member
596 struct amdgpu_bo *bo; member
919 struct amdgpu_bo *bo; member
1004 struct amdgpu_bo *bo);
1006 struct amdgpu_bo *bo);
1009 struct amdgpu_bo *bo);
1759 int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr);
1760 void amdgpu_mn_unregister(struct amdgpu_bo *bo);
1762 static inline int amdgpu_mn_register(struct amdgpu_bo *bo, unsigned long addr) in amdgpu_mn_register() argument
[all …]
amdgpu_vce.c 526 struct amdgpu_bo *bo; in amdgpu_vce_cs_reloc() local
536 mapping = amdgpu_cs_find_mapping(p, addr, &bo); in amdgpu_vce_cs_reloc()
551 addr += amdgpu_bo_gpu_offset(bo); in amdgpu_vce_cs_reloc()
amdgpu_ib.c 212 uint64_t addr = amdgpu_bo_gpu_offset(ib->user->bo); in amdgpu_ib_schedule()
amdgpu_bo_list.c 119 entry->tv.bo = &entry->robj->tbo; in amdgpu_bo_list_set()
/linux-4.4.14/drivers/gpu/drm/ttm/
ttm_bo.c 85 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug() argument
91 bo, bo->mem.num_pages, bo->mem.size >> 10, in ttm_bo_mem_space_debug()
92 bo->mem.size >> 20); in ttm_bo_mem_space_debug()
100 ttm_mem_type_debug(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
138 struct ttm_buffer_object *bo = in ttm_bo_release_list() local
140 struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_release_list()
141 size_t acc_size = bo->acc_size; in ttm_bo_release_list()
143 BUG_ON(atomic_read(&bo->list_kref.refcount)); in ttm_bo_release_list()
144 BUG_ON(atomic_read(&bo->kref.refcount)); in ttm_bo_release_list()
145 BUG_ON(atomic_read(&bo->cpu_writers)); in ttm_bo_release_list()
[all …]
ttm_bo_vm.c 44 static int ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, in ttm_bo_vm_fault_idle() argument
50 if (likely(!test_bit(TTM_BO_PRIV_FLAG_MOVING, &bo->priv_flags))) in ttm_bo_vm_fault_idle()
56 ret = ttm_bo_wait(bo, false, false, true); in ttm_bo_vm_fault_idle()
70 (void) ttm_bo_wait(bo, false, true, false); in ttm_bo_vm_fault_idle()
77 ret = ttm_bo_wait(bo, false, true, false); in ttm_bo_vm_fault_idle()
88 struct ttm_buffer_object *bo = (struct ttm_buffer_object *) in ttm_bo_vm_fault() local
90 struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_vm_fault()
101 &bdev->man[bo->mem.mem_type]; in ttm_bo_vm_fault()
110 ret = ttm_bo_reserve(bo, true, true, false, NULL); in ttm_bo_vm_fault()
118 (void) ttm_bo_wait_unreserved(bo); in ttm_bo_vm_fault()
[all …]
ttm_execbuf_util.c 39 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation_reverse() local
41 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation_reverse()
50 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_del_from_lru_locked() local
51 unsigned put_count = ttm_bo_del_from_lru(bo); in ttm_eu_del_from_lru_locked()
53 ttm_bo_list_ref_sub(bo, put_count, true); in ttm_eu_del_from_lru_locked()
67 glob = entry->bo->glob; in ttm_eu_backoff_reservation()
71 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation() local
73 ttm_bo_add_to_lru(bo); in ttm_eu_backoff_reservation()
74 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation()
107 glob = entry->bo->glob; in ttm_eu_reserve_buffers()
[all …]
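
ttm_execbuf_util.c is the glue that lets a driver reserve a whole list of BOs atomically before command submission: build a list of ttm_validate_buffer entries, reserve them under one ww_acquire_ctx ticket, and either fence them on success or back off on failure. A hedged sketch against the 4.4-era signatures:

    struct ww_acquire_ctx ticket;
    struct ttm_validate_buffer entry;
    struct list_head list;
    int r;

    INIT_LIST_HEAD(&list);
    entry.bo = &bo->tbo;        /* as entry->tv.bo in the driver hits */
    entry.shared = false;       /* exclusive fence slot (field per 4.4) */
    list_add(&entry.head, &list);

    r = ttm_eu_reserve_buffers(&ticket, &list, true /* interruptible */, NULL);
    if (r)
        return r;               /* nothing stays reserved on failure */

    /* ... validate placements and submit the command stream ... */

    ttm_eu_backoff_reservation(&ticket, &list);
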
ttm_bo_util.c 42 void ttm_bo_free_old_node(struct ttm_buffer_object *bo) in ttm_bo_free_old_node() argument
44 ttm_bo_mem_put(bo, &bo->mem); in ttm_bo_free_old_node()
47 int ttm_bo_move_ttm(struct ttm_buffer_object *bo, in ttm_bo_move_ttm() argument
51 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_ttm()
52 struct ttm_mem_reg *old_mem = &bo->mem; in ttm_bo_move_ttm()
57 ttm_bo_free_old_node(bo); in ttm_bo_move_ttm()
104 struct ttm_buffer_object *bo; in ttm_mem_io_evict() local
109 bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
112 list_del_init(&bo->io_reserve_lru); in ttm_mem_io_evict()
113 ttm_bo_unmap_virtual_locked(bo); in ttm_mem_io_evict()
[all …]
ttm_bo_manager.c 51 struct ttm_buffer_object *bo, in ttm_bo_man_get_node() argument
/linux-4.4.14/drivers/gpu/drm/virtio/
virtgpu_object.c 30 struct virtio_gpu_object *bo; in virtio_gpu_ttm_bo_destroy() local
33 bo = container_of(tbo, struct virtio_gpu_object, tbo); in virtio_gpu_ttm_bo_destroy()
34 vgdev = (struct virtio_gpu_device *)bo->gem_base.dev->dev_private; in virtio_gpu_ttm_bo_destroy()
36 if (bo->hw_res_handle) in virtio_gpu_ttm_bo_destroy()
37 virtio_gpu_cmd_unref_resource(vgdev, bo->hw_res_handle); in virtio_gpu_ttm_bo_destroy()
38 if (bo->pages) in virtio_gpu_ttm_bo_destroy()
39 virtio_gpu_object_free_sg_table(bo); in virtio_gpu_ttm_bo_destroy()
40 drm_gem_object_release(&bo->gem_base); in virtio_gpu_ttm_bo_destroy()
41 kfree(bo); in virtio_gpu_ttm_bo_destroy()
65 struct virtio_gpu_object *bo; in virtio_gpu_object_create() local
[all …]
virtgpu_ttm.c 120 struct ttm_buffer_object *bo;
124 bo = (struct ttm_buffer_object *)vma->vm_private_data;
125 if (bo == NULL)
127 vgdev = virtio_gpu_get_vgdev(bo->bdev);
169 struct ttm_buffer_object *bo, in ttm_bo_man_get_node() argument
235 static void virtio_gpu_evict_flags(struct ttm_buffer_object *bo, in virtio_gpu_evict_flags() argument
251 static int virtio_gpu_verify_access(struct ttm_buffer_object *bo, in virtio_gpu_verify_access() argument
363 static void virtio_gpu_move_null(struct ttm_buffer_object *bo, in virtio_gpu_move_null() argument
366 struct ttm_mem_reg *old_mem = &bo->mem; in virtio_gpu_move_null()
373 static int virtio_gpu_bo_move(struct ttm_buffer_object *bo, in virtio_gpu_bo_move() argument
[all …]
virtgpu_drv.h 356 int virtio_gpu_object_kmap(struct virtio_gpu_object *bo, void **ptr);
358 struct virtio_gpu_object *bo);
359 void virtio_gpu_object_free_sg_table(struct virtio_gpu_object *bo);
360 int virtio_gpu_object_wait(struct virtio_gpu_object *bo, bool no_wait);
375 virtio_gpu_object_ref(struct virtio_gpu_object *bo) in virtio_gpu_object_ref() argument
377 ttm_bo_reference(&bo->tbo); in virtio_gpu_object_ref()
378 return bo; in virtio_gpu_object_ref()
381 static inline void virtio_gpu_object_unref(struct virtio_gpu_object **bo) in virtio_gpu_object_unref() argument
385 if ((*bo) == NULL) in virtio_gpu_object_unref()
387 tbo = &((*bo)->tbo); in virtio_gpu_object_unref()
[all …]
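
virtio_gpu_object_unref() above follows the same idiom as ast_bo_unref(), mgag200_bo_unref(), and cirrus_bo_unref() further down: take a pointer-to-pointer, drop the TTM reference (which may free the object through its destroy callback), and clear the caller's pointer so it cannot dangle. Sketch:

    static inline void my_obj_unref(struct virtio_gpu_object **bo)
    {
        struct ttm_buffer_object *tbo;

        if ((*bo) == NULL)
            return;
        tbo = &((*bo)->tbo);
        ttm_bo_unref(&tbo);   /* may drop the last reference and free */
        *bo = NULL;           /* caller's handle is safely cleared */
    }
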
virtgpu_ioctl.c 59 struct ttm_buffer_object *bo; in virtio_gpu_object_list_validate() local
68 bo = buf->bo; in virtio_gpu_object_list_validate()
69 qobj = container_of(bo, struct virtio_gpu_object, tbo); in virtio_gpu_object_list_validate()
70 ret = ttm_bo_validate(bo, &qobj->placement, false, false); in virtio_gpu_object_list_validate()
82 struct ttm_buffer_object *bo; in virtio_gpu_unref_list() local
85 bo = buf->bo; in virtio_gpu_unref_list()
86 qobj = container_of(bo, struct virtio_gpu_object, tbo); in virtio_gpu_unref_list()
145 buflist[i].bo = &qobj->tbo; in virtio_gpu_execbuffer()
277 mainbuf.bo = &qobj->tbo; in virtio_gpu_resource_create_ioctl()
virtgpu_plane.c 68 struct virtio_gpu_object *bo; in virtio_gpu_plane_atomic_update() local
73 bo = gem_to_virtio_gpu_obj(vgfb->obj); in virtio_gpu_plane_atomic_update()
74 handle = bo->hw_res_handle; in virtio_gpu_plane_atomic_update()
virtgpu_display.c 138 struct virtio_gpu_object *bo; in virtio_gpu_page_flip() local
144 bo = gem_to_virtio_gpu_obj(vgfb->obj); in virtio_gpu_page_flip()
145 handle = bo->hw_res_handle; in virtio_gpu_page_flip()
148 bo->dumb ? ", dumb" : "", in virtio_gpu_page_flip()
150 if (bo->dumb) { in virtio_gpu_page_flip()
222 struct virtio_gpu_object *bo; in virtio_gpu_framebuffer_init() local
225 bo = gem_to_virtio_gpu_obj(obj); in virtio_gpu_framebuffer_init()
/linux-4.4.14/drivers/gpu/drm/ast/
ast_ttm.c 97 struct ast_bo *bo; in ast_bo_ttm_destroy() local
99 bo = container_of(tbo, struct ast_bo, bo); in ast_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in ast_bo_ttm_destroy()
102 kfree(bo); in ast_bo_ttm_destroy()
105 static bool ast_ttm_bo_is_ast_bo(struct ttm_buffer_object *bo) in ast_ttm_bo_is_ast_bo() argument
107 if (bo->destroy == &ast_bo_ttm_destroy) in ast_ttm_bo_is_ast_bo()
138 ast_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in ast_bo_evict_flags() argument
140 struct ast_bo *astbo = ast_bo(bo); in ast_bo_evict_flags()
142 if (!ast_ttm_bo_is_ast_bo(bo)) in ast_bo_evict_flags()
149 static int ast_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in ast_bo_verify_access() argument
[all …]
ast_drv.h 321 struct ttm_buffer_object bo; member
331 ast_bo(struct ttm_buffer_object *bo) in ast_bo() argument
333 return container_of(bo, struct ast_bo, bo); in ast_bo()
364 int ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr);
365 int ast_bo_unpin(struct ast_bo *bo);
367 static inline int ast_bo_reserve(struct ast_bo *bo, bool no_wait) in ast_bo_reserve() argument
371 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in ast_bo_reserve()
374 DRM_ERROR("reserve failed %p\n", bo); in ast_bo_reserve()
380 static inline void ast_bo_unreserve(struct ast_bo *bo) in ast_bo_unreserve() argument
382 ttm_bo_unreserve(&bo->bo); in ast_bo_unreserve()
[all …]
ast_fb.c 51 struct ast_bo *bo; in ast_dirty_update() local
61 bo = gem_to_ast_bo(obj); in ast_dirty_update()
69 ret = ast_bo_reserve(bo, true); in ast_dirty_update()
103 if (!bo->kmap.virtual) { in ast_dirty_update()
104 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_dirty_update()
107 ast_bo_unreserve(bo); in ast_dirty_update()
115 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, (x2 - x + 1) * bpp); in ast_dirty_update()
119 ttm_bo_kunmap(&bo->kmap); in ast_dirty_update()
121 ast_bo_unreserve(bo); in ast_dirty_update()
198 struct ast_bo *bo = NULL; in astfb_create() local
[all …]
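
ast_dirty_update() above, like mga_dirty_update() and cirrus_dirty_update() below, is the classic shadow-framebuffer flush: reserve the BO, kmap it once and cache the mapping, then memcpy_toio() each dirty scanline from the system-RAM shadow into VRAM. A condensed sketch of the copy loop (the offset math here is illustrative):

    if (!bo->kmap.virtual) {   /* map VRAM once and keep it cached */
        ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap);
        if (ret)
            goto out_unreserve;
    }
    for (i = y; i <= y2; i++) {
        unsigned src_offset = i * pitch + x * bpp;
        memcpy_toio(bo->kmap.virtual + src_offset,
                    sysram + src_offset, (x2 - x + 1) * bpp);
    }
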
ast_mode.c 515 struct ast_bo *bo; in ast_crtc_do_set_base() local
523 bo = gem_to_ast_bo(obj); in ast_crtc_do_set_base()
524 ret = ast_bo_reserve(bo, false); in ast_crtc_do_set_base()
527 ast_bo_push_sysram(bo); in ast_crtc_do_set_base()
528 ast_bo_unreserve(bo); in ast_crtc_do_set_base()
533 bo = gem_to_ast_bo(obj); in ast_crtc_do_set_base()
535 ret = ast_bo_reserve(bo, false); in ast_crtc_do_set_base()
539 ret = ast_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in ast_crtc_do_set_base()
541 ast_bo_unreserve(bo); in ast_crtc_do_set_base()
547 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_crtc_do_set_base()
[all …]
ast_main.c 544 static void ast_bo_unref(struct ast_bo **bo) in ast_bo_unref() argument
548 if ((*bo) == NULL) in ast_bo_unref()
551 tbo = &((*bo)->bo); in ast_bo_unref()
553 *bo = NULL; in ast_bo_unref()
564 static inline u64 ast_bo_mmap_offset(struct ast_bo *bo) in ast_bo_mmap_offset() argument
566 return drm_vma_node_offset_addr(&bo->bo.vma_node); in ast_bo_mmap_offset()
575 struct ast_bo *bo; in ast_dumb_mmap_offset() local
581 bo = gem_to_ast_bo(obj); in ast_dumb_mmap_offset()
582 *offset = ast_bo_mmap_offset(bo); in ast_dumb_mmap_offset()
/linux-4.4.14/drivers/gpu/drm/mgag200/
mgag200_ttm.c 97 struct mgag200_bo *bo; in mgag200_bo_ttm_destroy() local
99 bo = container_of(tbo, struct mgag200_bo, bo); in mgag200_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in mgag200_bo_ttm_destroy()
102 kfree(bo); in mgag200_bo_ttm_destroy()
105 static bool mgag200_ttm_bo_is_mgag200_bo(struct ttm_buffer_object *bo) in mgag200_ttm_bo_is_mgag200_bo() argument
107 if (bo->destroy == &mgag200_bo_ttm_destroy) in mgag200_ttm_bo_is_mgag200_bo()
138 mgag200_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in mgag200_bo_evict_flags() argument
140 struct mgag200_bo *mgabo = mgag200_bo(bo); in mgag200_bo_evict_flags()
142 if (!mgag200_ttm_bo_is_mgag200_bo(bo)) in mgag200_bo_evict_flags()
149 static int mgag200_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in mgag200_bo_verify_access() argument
[all …]
mgag200_drv.h 224 struct ttm_buffer_object bo; member
234 mgag200_bo(struct ttm_buffer_object *bo) in mgag200_bo() argument
236 return container_of(bo, struct mgag200_bo, bo); in mgag200_bo()
278 void mgag200_ttm_placement(struct mgag200_bo *bo, int domain);
280 static inline int mgag200_bo_reserve(struct mgag200_bo *bo, bool no_wait) in mgag200_bo_reserve() argument
284 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in mgag200_bo_reserve()
287 DRM_ERROR("reserve failed %p\n", bo); in mgag200_bo_reserve()
293 static inline void mgag200_bo_unreserve(struct mgag200_bo *bo) in mgag200_bo_unreserve() argument
295 ttm_bo_unreserve(&bo->bo); in mgag200_bo_unreserve()
303 int mgag200_bo_pin(struct mgag200_bo *bo, u32 pl_flag, u64 *gpu_addr);
[all …]
mgag200_cursor.c 44 struct mgag200_bo *bo = NULL; in mga_crtc_cursor_set() local
112 bo = gem_to_mga_bo(obj); in mga_crtc_cursor_set()
113 ret = mgag200_bo_reserve(bo, true); in mga_crtc_cursor_set()
118 if (!bo->kmap.virtual) { in mga_crtc_cursor_set()
119 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_crtc_cursor_set()
129 this_colour = ioread32(bo->kmap.virtual + i); in mga_crtc_cursor_set()
182 ret = ttm_bo_kmap(&pixels_prev->bo, 0, in mga_crtc_cursor_set()
183 pixels_prev->bo.num_pages, in mga_crtc_cursor_set()
195 this_colour = ioread32(bo->kmap.virtual + 4*(col + 64*row)); in mga_crtc_cursor_set()
241 ttm_bo_kunmap(&bo->kmap); in mga_crtc_cursor_set()
[all …]
mgag200_fb.c 27 struct mgag200_bo *bo; in mga_dirty_update() local
37 bo = gem_to_mga_bo(obj); in mga_dirty_update()
45 ret = mgag200_bo_reserve(bo, true); in mga_dirty_update()
79 if (!bo->kmap.virtual) { in mga_dirty_update()
80 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_dirty_update()
83 mgag200_bo_unreserve(bo); in mga_dirty_update()
91 memcpy_toio(bo->kmap.virtual + src_offset, mfbdev->sysram + src_offset, (x2 - x + 1) * bpp); in mga_dirty_update()
95 ttm_bo_kunmap(&bo->kmap); in mga_dirty_update()
97 mgag200_bo_unreserve(bo); in mga_dirty_update()
mgag200_main.c 327 static void mgag200_bo_unref(struct mgag200_bo **bo) in mgag200_bo_unref() argument
331 if ((*bo) == NULL) in mgag200_bo_unref()
334 tbo = &((*bo)->bo); in mgag200_bo_unref()
336 *bo = NULL; in mgag200_bo_unref()
347 static inline u64 mgag200_bo_mmap_offset(struct mgag200_bo *bo) in mgag200_bo_mmap_offset() argument
349 return drm_vma_node_offset_addr(&bo->bo.vma_node); in mgag200_bo_mmap_offset()
359 struct mgag200_bo *bo; in mgag200_dumb_mmap_offset() local
365 bo = gem_to_mga_bo(obj); in mgag200_dumb_mmap_offset()
366 *offset = mgag200_bo_mmap_offset(bo); in mgag200_dumb_mmap_offset()
mgag200_mode.c 830 struct mgag200_bo *bo; in mga_crtc_do_set_base() local
838 bo = gem_to_mga_bo(obj); in mga_crtc_do_set_base()
839 ret = mgag200_bo_reserve(bo, false); in mga_crtc_do_set_base()
842 mgag200_bo_push_sysram(bo); in mga_crtc_do_set_base()
843 mgag200_bo_unreserve(bo); in mga_crtc_do_set_base()
848 bo = gem_to_mga_bo(obj); in mga_crtc_do_set_base()
850 ret = mgag200_bo_reserve(bo, false); in mga_crtc_do_set_base()
854 ret = mgag200_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in mga_crtc_do_set_base()
856 mgag200_bo_unreserve(bo); in mga_crtc_do_set_base()
862 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_crtc_do_set_base()
[all …]
/linux-4.4.14/drivers/gpu/drm/cirrus/
cirrus_ttm.c 97 struct cirrus_bo *bo; in cirrus_bo_ttm_destroy() local
99 bo = container_of(tbo, struct cirrus_bo, bo); in cirrus_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in cirrus_bo_ttm_destroy()
102 kfree(bo); in cirrus_bo_ttm_destroy()
105 static bool cirrus_ttm_bo_is_cirrus_bo(struct ttm_buffer_object *bo) in cirrus_ttm_bo_is_cirrus_bo() argument
107 if (bo->destroy == &cirrus_bo_ttm_destroy) in cirrus_ttm_bo_is_cirrus_bo()
138 cirrus_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in cirrus_bo_evict_flags() argument
140 struct cirrus_bo *cirrusbo = cirrus_bo(bo); in cirrus_bo_evict_flags()
142 if (!cirrus_ttm_bo_is_cirrus_bo(bo)) in cirrus_bo_evict_flags()
149 static int cirrus_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in cirrus_bo_verify_access() argument
[all …]
cirrus_drv.h 164 struct ttm_buffer_object bo; member
174 cirrus_bo(struct ttm_buffer_object *bo) in cirrus_bo() argument
176 return container_of(bo, struct cirrus_bo, bo); in cirrus_bo()
240 void cirrus_ttm_placement(struct cirrus_bo *bo, int domain);
245 static inline int cirrus_bo_reserve(struct cirrus_bo *bo, bool no_wait) in cirrus_bo_reserve() argument
249 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in cirrus_bo_reserve()
252 DRM_ERROR("reserve failed %p\n", bo); in cirrus_bo_reserve()
258 static inline void cirrus_bo_unreserve(struct cirrus_bo *bo) in cirrus_bo_unreserve() argument
260 ttm_bo_unreserve(&bo->bo); in cirrus_bo_unreserve()
263 int cirrus_bo_push_sysram(struct cirrus_bo *bo);
[all …]
cirrus_fbdev.c 25 struct cirrus_bo *bo; in cirrus_dirty_update() local
35 bo = gem_to_cirrus_bo(obj); in cirrus_dirty_update()
43 ret = cirrus_bo_reserve(bo, true); in cirrus_dirty_update()
76 if (!bo->kmap.virtual) { in cirrus_dirty_update()
77 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_dirty_update()
80 cirrus_bo_unreserve(bo); in cirrus_dirty_update()
88 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, width * bpp); in cirrus_dirty_update()
92 ttm_bo_kunmap(&bo->kmap); in cirrus_dirty_update()
94 cirrus_bo_unreserve(bo); in cirrus_dirty_update()
174 struct cirrus_bo *bo = NULL; in cirrusfb_create() local
[all …]
cirrus_main.c 264 static void cirrus_bo_unref(struct cirrus_bo **bo) in cirrus_bo_unref() argument
268 if ((*bo) == NULL) in cirrus_bo_unref()
271 tbo = &((*bo)->bo); in cirrus_bo_unref()
273 *bo = NULL; in cirrus_bo_unref()
284 static inline u64 cirrus_bo_mmap_offset(struct cirrus_bo *bo) in cirrus_bo_mmap_offset() argument
286 return drm_vma_node_offset_addr(&bo->bo.vma_node); in cirrus_bo_mmap_offset()
296 struct cirrus_bo *bo; in cirrus_dumb_mmap_offset() local
302 bo = gem_to_cirrus_bo(obj); in cirrus_dumb_mmap_offset()
303 *offset = cirrus_bo_mmap_offset(bo); in cirrus_dumb_mmap_offset()
cirrus_mode.c 137 struct cirrus_bo *bo; in cirrus_crtc_do_set_base() local
145 bo = gem_to_cirrus_bo(obj); in cirrus_crtc_do_set_base()
146 ret = cirrus_bo_reserve(bo, false); in cirrus_crtc_do_set_base()
149 cirrus_bo_push_sysram(bo); in cirrus_crtc_do_set_base()
150 cirrus_bo_unreserve(bo); in cirrus_crtc_do_set_base()
155 bo = gem_to_cirrus_bo(obj); in cirrus_crtc_do_set_base()
157 ret = cirrus_bo_reserve(bo, false); in cirrus_crtc_do_set_base()
161 ret = cirrus_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in cirrus_crtc_do_set_base()
163 cirrus_bo_unreserve(bo); in cirrus_crtc_do_set_base()
169 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_crtc_do_set_base()
[all …]
/linux-4.4.14/drivers/gpu/drm/vmwgfx/
vmwgfx_dmabuf.c 50 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_pin_in_placement() local
59 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL); in vmw_dmabuf_pin_in_placement()
63 ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_pin_in_placement()
67 ttm_bo_unreserve(bo); in vmw_dmabuf_pin_in_placement()
92 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_pin_in_vram_or_gmr() local
101 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL); in vmw_dmabuf_pin_in_vram_or_gmr()
105 ret = ttm_bo_validate(bo, &vmw_vram_gmr_placement, interruptible, in vmw_dmabuf_pin_in_vram_or_gmr()
110 ret = ttm_bo_validate(bo, &vmw_vram_placement, interruptible, false); in vmw_dmabuf_pin_in_vram_or_gmr()
116 ttm_bo_unreserve(bo); in vmw_dmabuf_pin_in_vram_or_gmr()
160 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_pin_in_start_of_vram() local
[all …]
vmwgfx_resource.c 74 vmw_dma_buffer(struct ttm_buffer_object *bo) in vmw_dma_buffer() argument
76 return container_of(bo, struct vmw_dma_buffer, base); in vmw_dma_buffer()
80 vmw_user_dma_buffer(struct ttm_buffer_object *bo) in vmw_user_dma_buffer() argument
82 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_user_dma_buffer()
130 struct ttm_buffer_object *bo = &res->backup->base; in vmw_resource_release() local
132 ttm_bo_reserve(bo, false, false, false, NULL); in vmw_resource_release()
137 val_buf.bo = bo; in vmw_resource_release()
143 ttm_bo_unreserve(bo); in vmw_resource_release()
396 void vmw_dmabuf_bo_free(struct ttm_buffer_object *bo) in vmw_dmabuf_bo_free() argument
398 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_dmabuf_bo_free()
[all …]
vmwgfx_mob.c 199 struct ttm_buffer_object *bo; in vmw_takedown_otable_base() local
204 bo = otable->page_table->pt_bo; in vmw_takedown_otable_base()
222 if (bo) { in vmw_takedown_otable_base()
225 ret = ttm_bo_reserve(bo, false, true, false, NULL); in vmw_takedown_otable_base()
228 vmw_fence_single_bo(bo, NULL); in vmw_takedown_otable_base()
229 ttm_bo_unreserve(bo); in vmw_takedown_otable_base()
352 struct ttm_buffer_object *bo = batch->otable_bo; in vmw_otable_batch_takedown() local
360 ret = ttm_bo_reserve(bo, false, true, false, NULL); in vmw_otable_batch_takedown()
363 vmw_fence_single_bo(bo, NULL); in vmw_otable_batch_takedown()
364 ttm_bo_unreserve(bo); in vmw_otable_batch_takedown()
[all …]
vmwgfx_cotable.c 164 struct ttm_buffer_object *bo = &res->backup->base; in vmw_cotable_unscrub() local
170 WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB); in vmw_cotable_unscrub()
171 lockdep_assert_held(&bo->resv->lock.base); in vmw_cotable_unscrub()
181 WARN_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_cotable_unscrub()
186 cmd->body.mobid = bo->mem.start; in vmw_cotable_unscrub()
215 val_buf->bo = &res->backup->base; in vmw_cotable_bind()
312 struct ttm_buffer_object *bo = val_buf->bo; in vmw_cotable_unbind() local
318 WARN_ON_ONCE(bo->mem.mem_type != VMW_PL_MOB); in vmw_cotable_unbind()
319 lockdep_assert_held(&bo->resv->lock.base); in vmw_cotable_unbind()
326 vmw_fence_single_bo(bo, fence); in vmw_cotable_unbind()
[all …]
vmwgfx_buffer.c 514 int vmw_bo_map_dma(struct ttm_buffer_object *bo) in vmw_bo_map_dma() argument
517 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_map_dma()
531 void vmw_bo_unmap_dma(struct ttm_buffer_object *bo) in vmw_bo_unmap_dma() argument
534 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_unmap_dma()
551 const struct vmw_sg_table *vmw_bo_sg_table(struct ttm_buffer_object *bo) in vmw_bo_sg_table() argument
554 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_sg_table()
766 static void vmw_evict_flags(struct ttm_buffer_object *bo, in vmw_evict_flags() argument
772 static int vmw_verify_access(struct ttm_buffer_object *bo, struct file *filp) in vmw_verify_access() argument
777 return vmw_user_dmabuf_verify_access(bo, tfile); in vmw_verify_access()
812 static int vmw_ttm_fault_reserve_notify(struct ttm_buffer_object *bo) in vmw_ttm_fault_reserve_notify() argument
[all …]
vmwgfx_context.c 345 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_context_bind() local
347 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_context_bind()
358 cmd->body.mobid = bo->mem.start; in vmw_gb_context_bind()
371 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_context_unbind() local
388 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_context_unbind()
426 vmw_fence_single_bo(bo, fence); in vmw_gb_context_unbind()
521 struct ttm_buffer_object *bo = val_buf->bo; in vmw_dx_context_bind() local
523 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_dx_context_bind()
535 cmd->body.mobid = bo->mem.start; in vmw_dx_context_bind()
588 struct ttm_buffer_object *bo = val_buf->bo; in vmw_dx_context_unbind() local
[all …]
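
The bind callbacks above all end the same way: the backing BO must already sit in MOB memory (hence the BUG_ON on mem_type), and the FIFO command names the MOB by its starting page. Schematically, using only the fields visible in the hits:

    BUG_ON(bo->mem.mem_type != VMW_PL_MOB);  /* backup must be a MOB */
    cmd->body.mobid = bo->mem.start;         /* device addresses it by page */
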
vmwgfx_drv.h 619 extern void vmw_dmabuf_bo_free(struct ttm_buffer_object *bo);
624 void (*bo_free) (struct ttm_buffer_object *bo));
625 extern int vmw_user_dmabuf_verify_access(struct ttm_buffer_object *bo,
643 extern uint32_t vmw_dmabuf_validate_node(struct ttm_buffer_object *bo,
645 extern void vmw_dmabuf_validate_clear(struct ttm_buffer_object *bo);
661 extern void vmw_resource_move_notify(struct ttm_buffer_object *bo,
663 extern void vmw_query_move_notify(struct ttm_buffer_object *bo,
666 extern void vmw_fence_single_bo(struct ttm_buffer_object *bo,
674 struct vmw_dma_buffer *bo,
684 struct vmw_dma_buffer *bo,
[all …]
vmwgfx_gmrid_manager.c 48 struct ttm_buffer_object *bo, in vmw_gmrid_man_get_node() argument
62 gman->used_gmr_pages += bo->num_pages; in vmw_gmrid_man_get_node()
86 mem->num_pages = bo->num_pages; in vmw_gmrid_man_get_node()
96 gman->used_gmr_pages -= bo->num_pages; in vmw_gmrid_man_get_node()
vmwgfx_fifo.c 598 struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base; in vmw_fifo_emit_dummy_legacy_query() local
616 if (bo->mem.mem_type == TTM_PL_VRAM) { in vmw_fifo_emit_dummy_legacy_query()
618 cmd->body.guestResult.offset = bo->offset; in vmw_fifo_emit_dummy_legacy_query()
620 cmd->body.guestResult.gmrId = bo->mem.start; in vmw_fifo_emit_dummy_legacy_query()
647 struct ttm_buffer_object *bo = &dev_priv->dummy_query_bo->base; in vmw_fifo_emit_dummy_gb_query() local
664 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_fifo_emit_dummy_gb_query()
665 cmd->body.mobid = bo->mem.start; in vmw_fifo_emit_dummy_gb_query()
vmwgfx_shader.c 252 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_shader_bind() local
254 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_shader_bind()
266 cmd->body.mobid = bo->mem.start; in vmw_gb_shader_bind()
308 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_shader_unbind()
460 struct ttm_buffer_object *bo = val_buf->bo; in vmw_dx_shader_bind() local
462 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_dx_shader_bind()
539 vmw_fence_single_bo(val_buf->bo, fence); in vmw_dx_shader_unbind()
vmwgfx_surface.c 449 BUG_ON(val_buf->bo == NULL); in vmw_legacy_srf_dma()
458 vmw_bo_get_guest_ptr(val_buf->bo, &ptr); in vmw_legacy_srf_dma()
470 vmw_fence_single_bo(val_buf->bo, fence); in vmw_legacy_srf_dma()
1134 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_surface_bind() local
1136 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_surface_bind()
1150 cmd1->body.mobid = bo->mem.start; in vmw_gb_surface_bind()
1168 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_surface_unbind() local
1187 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_surface_unbind()
1225 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_surface_unbind()
vmwgfx_kms.c 266 struct ttm_buffer_object *bo, in vmw_kms_cursor_snoop() argument
321 ret = ttm_bo_reserve(bo, true, false, false, NULL); in vmw_kms_cursor_snoop()
327 ret = ttm_bo_kmap(bo, kmap_offset, kmap_num, &map); in vmw_kms_cursor_snoop()
347 ttm_bo_unreserve(bo); in vmw_kms_cursor_snoop()
981 struct vmw_dma_buffer *bo = NULL; in vmw_kms_fb_create() local
1028 &surface, &bo); in vmw_kms_fb_create()
1032 vfb = vmw_kms_new_framebuffer(dev_priv, bo, surface, in vmw_kms_fb_create()
1042 if (bo) in vmw_kms_fb_create()
1043 vmw_dmabuf_unreference(&bo); in vmw_kms_fb_create()
1838 struct ttm_buffer_object *bo = &buf->base; in vmw_kms_helper_buffer_prepare() local
[all …]
vmwgfx_execbuf.c 534 val_buf->bo = ttm_bo_reference(&vbo->base); in vmw_bo_to_validate_list()
3464 struct ttm_buffer_object *bo; in vmw_apply_relocations() local
3469 bo = validate->bo; in vmw_apply_relocations()
3470 switch (bo->mem.mem_type) { in vmw_apply_relocations()
3472 reloc->location->offset += bo->offset; in vmw_apply_relocations()
3476 reloc->location->gmrId = bo->mem.start; in vmw_apply_relocations()
3479 *reloc->mob_loc = bo->mem.start; in vmw_apply_relocations()
3530 ttm_bo_unref(&entry->base.bo); in vmw_clear_validations()
3541 struct ttm_buffer_object *bo, in vmw_validate_single_buffer() argument
3545 struct vmw_dma_buffer *vbo = container_of(bo, struct vmw_dma_buffer, in vmw_validate_single_buffer()
[all …]
/linux-4.4.14/drivers/gpu/drm/bochs/
bochs_mm.c 10 static void bochs_ttm_placement(struct bochs_bo *bo, int domain);
76 struct bochs_bo *bo; in bochs_bo_ttm_destroy() local
78 bo = container_of(tbo, struct bochs_bo, bo); in bochs_bo_ttm_destroy()
79 drm_gem_object_release(&bo->gem); in bochs_bo_ttm_destroy()
80 kfree(bo); in bochs_bo_ttm_destroy()
83 static bool bochs_ttm_bo_is_bochs_bo(struct ttm_buffer_object *bo) in bochs_ttm_bo_is_bochs_bo() argument
85 if (bo->destroy == &bochs_bo_ttm_destroy) in bochs_ttm_bo_is_bochs_bo()
115 bochs_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in bochs_bo_evict_flags() argument
117 struct bochs_bo *bochsbo = bochs_bo(bo); in bochs_bo_evict_flags()
119 if (!bochs_ttm_bo_is_bochs_bo(bo)) in bochs_bo_evict_flags()
[all …]
Dbochs_fbdev.c18 struct bochs_bo *bo = gem_to_bochs_bo(bochs->fb.gfb.obj); in bochsfb_mmap() local
20 return ttm_fbdev_mmap(vma, &bo->bo); in bochsfb_mmap()
63 struct bochs_bo *bo = NULL; in bochsfb_create() local
83 bo = gem_to_bochs_bo(gobj); in bochsfb_create()
85 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochsfb_create()
89 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, NULL); in bochsfb_create()
92 ttm_bo_unreserve(&bo->bo); in bochsfb_create()
96 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, in bochsfb_create()
97 &bo->kmap); in bochsfb_create()
100 ttm_bo_unreserve(&bo->bo); in bochsfb_create()
[all …]
Dbochs_kms.c46 struct bochs_bo *bo; in bochs_crtc_mode_set_base() local
52 bo = gem_to_bochs_bo(bochs_fb->obj); in bochs_crtc_mode_set_base()
53 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochs_crtc_mode_set_base()
57 bochs_bo_unpin(bo); in bochs_crtc_mode_set_base()
58 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
66 bo = gem_to_bochs_bo(bochs_fb->obj); in bochs_crtc_mode_set_base()
67 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochs_crtc_mode_set_base()
71 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in bochs_crtc_mode_set_base()
73 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
77 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
Dbochs.h100 struct ttm_buffer_object bo; member
108 static inline struct bochs_bo *bochs_bo(struct ttm_buffer_object *bo) in bochs_bo() argument
110 return container_of(bo, struct bochs_bo, bo); in bochs_bo()
120 static inline u64 bochs_bo_mmap_offset(struct bochs_bo *bo) in bochs_bo_mmap_offset() argument
122 return drm_vma_node_offset_addr(&bo->bo.vma_node); in bochs_bo_mmap_offset()
154 int bochs_bo_pin(struct bochs_bo *bo, u32 pl_flag, u64 *gpu_addr);
155 int bochs_bo_unpin(struct bochs_bo *bo);
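
The bochs entries show the embedding idiom shared by every TTM driver in this list: the driver's object wraps a struct ttm_buffer_object member that is itself named bo, and bochs_bo() recovers the wrapper via container_of after bochs_ttm_bo_is_bochs_bo() has checked the destroy callback. A standalone illustration of just the downcast (driver names kept, the TTM machinery stubbed out; builds with any C compiler):

	#include <stdio.h>
	#include <stddef.h>

	#define container_of(ptr, type, member) \
		((type *)((char *)(ptr) - offsetof(type, member)))

	struct ttm_buffer_object {
		void (*destroy)(struct ttm_buffer_object *);
	};

	struct bochs_bo {
		struct ttm_buffer_object bo;	/* embedded base object */
		int pin_count;
	};

	static struct bochs_bo *bochs_bo(struct ttm_buffer_object *bo)
	{
		return container_of(bo, struct bochs_bo, bo);
	}

	int main(void)
	{
		struct bochs_bo b = { .pin_count = 3 };
		struct ttm_buffer_object *base = &b.bo;	/* what TTM hands back */

		printf("pin_count = %d\n", bochs_bo(base)->pin_count);
		return 0;
	}
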
/linux-4.4.14/include/linux/
Dhost1x.h61 struct host1x_bo *(*get)(struct host1x_bo *bo);
62 void (*put)(struct host1x_bo *bo);
63 dma_addr_t (*pin)(struct host1x_bo *bo, struct sg_table **sgt);
64 void (*unpin)(struct host1x_bo *bo, struct sg_table *sgt);
65 void *(*mmap)(struct host1x_bo *bo);
66 void (*munmap)(struct host1x_bo *bo, void *addr);
67 void *(*kmap)(struct host1x_bo *bo, unsigned int pagenum);
68 void (*kunmap)(struct host1x_bo *bo, unsigned int pagenum, void *addr);
75 static inline void host1x_bo_init(struct host1x_bo *bo, in host1x_bo_init() argument
78 bo->ops = ops; in host1x_bo_init()
[all …]
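
host1x.h is the consumer-side contract: a buffer provider (Tegra DRM in-tree) fills a host1x_bo_ops table and stamps it on each object with host1x_bo_init(), which is all the bo->ops = ops line above does. A hedged sketch of a provider wiring that up; my_bo and its helpers are hypothetical placeholders, and it compiles only against include/linux/host1x.h in the tree:

	#include <linux/host1x.h>

	struct my_bo {
		struct host1x_bo base;
		/* provider-private backing storage would live here */
	};

	static struct host1x_bo *my_bo_get(struct host1x_bo *bo)
	{
		/* take a reference on the underlying object */
		return bo;
	}

	static void my_bo_put(struct host1x_bo *bo)
	{
		/* drop the reference taken in my_bo_get() */
	}

	static const struct host1x_bo_ops my_bo_ops = {
		.get = my_bo_get,
		.put = my_bo_put,
		/* .pin/.unpin/.mmap/.munmap/.kmap/.kunmap elided */
	};

	static void my_bo_setup(struct my_bo *obj)
	{
		host1x_bo_init(&obj->base, &my_bo_ops);
	}
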
/linux-4.4.14/include/drm/ttm/
Dttm_bo_driver.h210 struct ttm_buffer_object *bo,
381 void(*evict_flags) (struct ttm_buffer_object *bo,
396 int (*move) (struct ttm_buffer_object *bo,
413 int (*verify_access) (struct ttm_buffer_object *bo,
418 void (*move_notify)(struct ttm_buffer_object *bo,
422 int (*fault_reserve_notify)(struct ttm_buffer_object *bo);
427 void (*swap_notify) (struct ttm_buffer_object *bo);
694 extern int ttm_bo_mem_space(struct ttm_buffer_object *bo,
700 extern void ttm_bo_mem_put(struct ttm_buffer_object *bo,
702 extern void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
[all …]
Dttm_bo_api.h285 struct ttm_buffer_object *bo; member
297 ttm_bo_reference(struct ttm_buffer_object *bo) in ttm_bo_reference() argument
299 kref_get(&bo->kref); in ttm_bo_reference()
300 return bo; in ttm_bo_reference()
317 extern int ttm_bo_wait(struct ttm_buffer_object *bo, bool lazy,
335 extern int ttm_bo_validate(struct ttm_buffer_object *bo,
347 extern void ttm_bo_unref(struct ttm_buffer_object **bo);
359 extern void ttm_bo_list_ref_sub(struct ttm_buffer_object *bo, int count,
372 extern void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
384 extern int ttm_bo_del_from_lru(struct ttm_buffer_object *bo);
[all …]
Dttm_execbuf_util.h47 struct ttm_buffer_object *bo; member
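
Taken together, the two TTM headers spell out the lifecycle every driver above follows: reference the object, reserve it (a ww_mutex lock, hence the optional ticket), validate it into a placement, use the now-stable offset, then unreserve. A sketch against the 4.4-era prototypes listed above (five-argument ttm_bo_reserve); building the placement is driver-specific and elided:

	#include <drm/ttm/ttm_bo_api.h>

	static int use_bo(struct ttm_buffer_object *bo,
			  struct ttm_placement *placement)
	{
		int ret;

		ret = ttm_bo_reserve(bo, true /* interruptible */,
				     false /* no_wait */,
				     false /* use_ticket */, NULL);
		if (ret)
			return ret;

		/* migrate/allocate backing store to satisfy 'placement' */
		ret = ttm_bo_validate(bo, placement, true, false);
		if (ret == 0) {
			/* bo->offset and bo->mem.start are stable here;
			 * this is where callers emit GPU commands */
		}

		ttm_bo_unreserve(bo);
		return ret;
	}
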
/linux-4.4.14/drivers/gpu/drm/nouveau/
Dnouveau_bo.c129 nouveau_bo_del_ttm(struct ttm_buffer_object *bo) in nouveau_bo_del_ttm() argument
131 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_del_ttm()
133 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm()
136 DRM_ERROR("bo %p still attached to GEM object\n", bo); in nouveau_bo_del_ttm()
146 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
210 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
222 nvbo->bo.mem.num_pages = size >> PAGE_SHIFT; in nouveau_bo_new()
228 ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
257 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in set_placement_range()
263 nvbo->bo.mem.num_pages < vram_pages / 4) { in set_placement_range()
[all …]
Dnv50_fence.c40 struct ttm_mem_reg *mem = &priv->bo->bo.mem; in nv50_fence_context_new()
65 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv50_fence_context_new() local
66 u32 start = bo->bo.mem.start * PAGE_SIZE; in nv50_fence_context_new()
67 u32 limit = start + bo->bo.mem.size - 1; in nv50_fence_context_new()
103 0, 0x0000, NULL, NULL, &priv->bo); in nv50_fence_create()
105 ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false); in nv50_fence_create()
107 ret = nouveau_bo_map(priv->bo); in nv50_fence_create()
109 nouveau_bo_unpin(priv->bo); in nv50_fence_create()
112 nouveau_bo_ref(NULL, &priv->bo); in nv50_fence_create()
120 nouveau_bo_wr32(priv->bo, 0x000, 0x00000000); in nv50_fence_create()
Dnouveau_gem.c39 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_del()
40 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_gem_object_del() local
49 drm_prime_gem_destroy(gem, nvbo->bo.sg); in nouveau_gem_object_del()
55 ttm_bo_unref(&bo); in nouveau_gem_object_del()
66 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_open()
74 ret = ttm_bo_reserve(&nvbo->bo, false, false, false, NULL); in nouveau_gem_object_open()
103 ttm_bo_unreserve(&nvbo->bo); in nouveau_gem_object_open()
119 const bool mapped = nvbo->bo.mem.mem_type != TTM_PL_SYSTEM; in nouveau_gem_object_unmap()
120 struct reservation_object *resv = nvbo->bo.resv; in nouveau_gem_object_unmap()
129 ttm_bo_wait(&nvbo->bo, true, false, false); in nouveau_gem_object_unmap()
[all …]
Dnv84_fence.c107 return nouveau_bo_rd32(priv->bo, chan->chid * 16/4); in nv84_fence_read()
119 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv84_fence_context_del() local
120 nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]); in nv84_fence_context_del()
123 nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); in nv84_fence_context_del()
124 nouveau_bo_vma_del(priv->bo, &fctx->vma_gart); in nv84_fence_context_del()
125 nouveau_bo_vma_del(priv->bo, &fctx->vma); in nv84_fence_context_del()
151 ret = nouveau_bo_vma_add(priv->bo, cli->vm, &fctx->vma); in nv84_fence_context_new()
159 struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); in nv84_fence_context_new() local
160 ret = nouveau_bo_vma_add(bo, cli->vm, &fctx->dispc_vma[i]); in nv84_fence_context_new()
177 priv->suspend[i] = nouveau_bo_rd32(priv->bo, i*4); in nv84_fence_suspend()
[all …]
Dnouveau_bo.h11 struct ttm_buffer_object bo; member
45 nouveau_bo(struct ttm_buffer_object *bo) in nouveau_bo() argument
47 return container_of(bo, struct nouveau_bo, bo); in nouveau_bo()
59 *pnvbo = ref ? nouveau_bo(ttm_bo_reference(&ref->bo)) : NULL; in nouveau_bo_ref()
61 struct ttm_buffer_object *bo = &prev->bo; in nouveau_bo_ref() local
63 ttm_bo_unref(&bo); in nouveau_bo_ref()
Dnv17_fence.c78 struct ttm_mem_reg *mem = &priv->bo->bo.mem; in nv17_fence_context_new()
110 nouveau_bo_wr32(priv->bo, 0, priv->sequence); in nv17_fence_resume()
132 0, 0x0000, NULL, NULL, &priv->bo); in nv17_fence_create()
134 ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false); in nv17_fence_create()
136 ret = nouveau_bo_map(priv->bo); in nv17_fence_create()
138 nouveau_bo_unpin(priv->bo); in nv17_fence_create()
141 nouveau_bo_ref(NULL, &priv->bo); in nv17_fence_create()
149 nouveau_bo_wr32(priv->bo, 0x000, 0x00000000); in nv17_fence_create()
Dnouveau_prime.c34 int npages = nvbo->bo.num_pages; in nouveau_gem_prime_get_sg_table()
36 return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages); in nouveau_gem_prime_get_sg_table()
44 ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.num_pages, in nouveau_gem_prime_vmap()
81 ret = drm_gem_object_init(dev, &nvbo->gem, nvbo->bo.mem.size); in nouveau_gem_prime_import_sg_table()
114 return nvbo->bo.resv; in nouveau_gem_prime_res_obj()
Dnv10_fence.c89 nouveau_bo_unmap(priv->bo); in nv10_fence_destroy()
90 if (priv->bo) in nv10_fence_destroy()
91 nouveau_bo_unpin(priv->bo); in nv10_fence_destroy()
92 nouveau_bo_ref(NULL, &priv->bo); in nv10_fence_destroy()
Dnouveau_display.c657 nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset); in nouveau_display_resume()
741 ret = ttm_bo_reserve(&new_bo->bo, true, false, false, NULL); in nouveau_crtc_page_flip()
748 ttm_bo_unreserve(&new_bo->bo); in nouveau_crtc_page_flip()
753 ttm_bo_unreserve(&new_bo->bo); in nouveau_crtc_page_flip()
755 ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL); in nouveau_crtc_page_flip()
764 new_bo->bo.offset }; in nouveau_crtc_page_flip()
805 ttm_bo_unreserve(&old_bo->bo); in nouveau_crtc_page_flip()
813 ttm_bo_unreserve(&old_bo->bo); in nouveau_crtc_page_flip()
888 struct nouveau_bo *bo; in nouveau_display_dumb_create() local
902 ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo); in nouveau_display_dumb_create()
[all …]
Dnouveau_ttm.c77 struct ttm_buffer_object *bo, in nouveau_vram_manager_new() argument
83 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_vram_manager_new()
139 struct ttm_buffer_object *bo, in nouveau_gart_manager_new() argument
143 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_gart_manager_new()
144 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_gart_manager_new()
228 struct ttm_buffer_object *bo, in nv04_gart_manager_new() argument
Dnv10_fence.h15 struct nouveau_bo *bo; member
Dnouveau_abi16.c297 if (chan->chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) in nouveau_abi16_ioctl_channel_alloc()
544 args.start += drm->agp.base + chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
545 args.limit += drm->agp.base + chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
549 args.start += chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
550 args.limit += chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
Dnouveau_dma.c82 nv50_dma_push(struct nouveau_channel *chan, struct nouveau_bo *bo, in nv50_dma_push() argument
91 vma = nouveau_bo_vma_find(bo, cli->vm); in nv50_dma_push()
Dnouveau_fence.h99 struct nouveau_bo *bo; member
Dnv50_display.c668 evo_data(push, nv_fb->nvbo->bo.offset >> 8); in nv50_display_flip_next()
675 evo_data(push, nv_fb->nvbo->bo.offset >> 8); in nv50_display_flip_next()
907 evo_data(push, nvfb->nvbo->bo.offset >> 8); in nv50_crtc_set_image()
920 evo_data(push, nvfb->nvbo->bo.offset >> 8); in nv50_crtc_set_image()
950 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
955 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
961 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
1074 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
1081 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
1089 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
[all …]
Dnouveau_fbcon.c417 info->fix.smem_start = nvbo->bo.mem.bus.base + in nouveau_fbcon_create()
418 nvbo->bo.mem.bus.offset; in nouveau_fbcon_create()
438 nvbo->bo.offset, nvbo); in nouveau_fbcon_create()
Dnouveau_chan.c128 chan->push.vma.offset = chan->push.buffer->bo.offset; in nouveau_channel_prep()
143 if (chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) { in nouveau_channel_prep()
Dnouveau_fence.c394 struct reservation_object *resv = nvbo->bo.resv; in nouveau_fence_sync()
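
Across the nouveau fence files the same create/pin/map ladder recurs, with the unwind mirrored on failure: unpin, then drop the reference by ref'ing NULL over the pointer. A sketch of the ladder as nv50_fence_create() uses it; the argument order follows the 4.4 nouveau_bo_new() call visible above:

	/* Allocate a small VRAM bo, pin it, and CPU-map it for
	 * nouveau_bo_rd32/wr32 access; unwinds like nv50_fence_create(). */
	static int alloc_pinned_vram_bo(struct drm_device *dev, u32 size,
					struct nouveau_bo **pnvbo)
	{
		int ret;

		ret = nouveau_bo_new(dev, size, 0, TTM_PL_FLAG_VRAM,
				     0, 0x0000, NULL, NULL, pnvbo);
		if (ret)
			return ret;

		ret = nouveau_bo_pin(*pnvbo, TTM_PL_FLAG_VRAM, false);
		if (ret == 0) {
			ret = nouveau_bo_map(*pnvbo);
			if (ret)
				nouveau_bo_unpin(*pnvbo);
		}
		if (ret)
			nouveau_bo_ref(NULL, pnvbo);	/* drops the bo */
		return ret;
	}
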
/linux-4.4.14/crypto/
Daes_generic.c1300 #define f_rn(bo, bi, n, k) do { \ argument
1301 bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \
1307 #define f_nround(bo, bi, k) do {\ argument
1308 f_rn(bo, bi, 0, k); \
1309 f_rn(bo, bi, 1, k); \
1310 f_rn(bo, bi, 2, k); \
1311 f_rn(bo, bi, 3, k); \
1315 #define f_rl(bo, bi, n, k) do { \ argument
1316 bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \
1322 #define f_lround(bo, bi, k) do {\ argument
[all …]
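
A false positive for buffer objects: in aes_generic.c, bo and bi are the output and input state words of one AES round. The listing truncates the macro bodies; a plausible reconstruction of the forward-round pair is below (each output word xors four rotated T-table lookups with one round-key word; verify the exact byte order against the tree before relying on it):

	#define byte(x, nr)	((u8)((x) >> ((nr) * 8)))

	#define f_rn(bo, bi, n, k) do {					\
		bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^		\
			crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^	\
			crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
			crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^	\
			*(k + n);					\
	} while (0)

	/* one full round transforms all four state words */
	#define f_nround(bo, bi, k) do {	\
		f_rn(bo, bi, 0, k);		\
		f_rn(bo, bi, 1, k);		\
		f_rn(bo, bi, 2, k);		\
		f_rn(bo, bi, 3, k);		\
		k += 4;				\
	} while (0)
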
/linux-4.4.14/drivers/gpu/host1x/
Djob.c100 void host1x_job_add_gather(struct host1x_job *job, struct host1x_bo *bo, in host1x_job_add_gather() argument
106 cur_gather->bo = bo; in host1x_job_add_gather()
155 if (patch != wait->bo) in do_waitchks()
158 trace_host1x_syncpt_wait_check(wait->bo, wait->offset, in do_waitchks()
171 wait->bo = NULL; in do_waitchks()
188 reloc->target.bo = host1x_bo_get(reloc->target.bo); in pin_job()
189 if (!reloc->target.bo) in pin_job()
192 phys_addr = host1x_bo_pin(reloc->target.bo, &sgt); in pin_job()
197 job->unpins[job->num_unpins].bo = reloc->target.bo; in pin_job()
207 g->bo = host1x_bo_get(g->bo); in pin_job()
[all …]
Djob.h25 struct host1x_bo *bo; member
38 struct host1x_bo *bo; member
45 struct host1x_bo *bo; member
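
In host1x's job code, every buffer a job touches is taken and pinned up front and remembered in job->unpins so teardown can reverse the bracket. A condensed sketch of what pin_job() above does per reloc target (error handling trimmed; the unpins bookkeeping follows the job.c hits, with the sgt field assumed from context):

	static dma_addr_t pin_one(struct host1x_job *job, struct host1x_bo *bo)
	{
		struct sg_table *sgt;
		dma_addr_t phys;

		bo = host1x_bo_get(bo);		/* hold a ref for the job */
		phys = host1x_bo_pin(bo, &sgt);	/* dma address + sg table */

		job->unpins[job->num_unpins].bo = bo;	/* for teardown */
		job->unpins[job->num_unpins].sgt = sgt;
		job->num_unpins++;

		return phys;
	}
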
/linux-4.4.14/drivers/gpu/drm/msm/
Dmsm_ringbuffer.c35 ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC); in msm_ringbuffer_new()
36 if (IS_ERR(ring->bo)) { in msm_ringbuffer_new()
37 ret = PTR_ERR(ring->bo); in msm_ringbuffer_new()
38 ring->bo = NULL; in msm_ringbuffer_new()
42 ring->start = msm_gem_vaddr_locked(ring->bo); in msm_ringbuffer_new()
58 if (ring->bo) in msm_ringbuffer_destroy()
59 drm_gem_object_unreference_unlocked(ring->bo); in msm_ringbuffer_destroy()
Dmsm_fbdev.c37 struct drm_gem_object *bo; member
64 struct drm_gem_object *drm_obj = fbdev->bo; in msm_fbdev_mmap()
108 fbdev->bo = msm_gem_new(dev, size, MSM_BO_SCANOUT | in msm_fbdev_create()
111 if (IS_ERR(fbdev->bo)) { in msm_fbdev_create()
112 ret = PTR_ERR(fbdev->bo); in msm_fbdev_create()
113 fbdev->bo = NULL; in msm_fbdev_create()
118 fb = msm_framebuffer_init(dev, &mode_cmd, &fbdev->bo); in msm_fbdev_create()
124 drm_gem_object_unreference(fbdev->bo); in msm_fbdev_create()
136 ret = msm_gem_get_iova_locked(fbdev->bo, 0, &paddr); in msm_fbdev_create()
165 fbi->screen_base = msm_gem_vaddr_locked(fbdev->bo); in msm_fbdev_create()
[all …]
Dmsm_fb.c51 struct drm_gem_object *bo = msm_fb->planes[i]; in msm_framebuffer_destroy() local
52 if (bo) in msm_framebuffer_destroy()
53 drm_gem_object_unreference_unlocked(bo); in msm_framebuffer_destroy()
Dmsm_ringbuffer.h26 struct drm_gem_object *bo; member
Dmsm_gpu.c697 msm_gem_put_iova(gpu->rb->bo, gpu->id); in msm_gpu_cleanup()
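
The msm ring buffer is just a GEM object: allocate it write-combined, keep the object pointer in ring->bo, and use the kernel vaddr as the CPU-side ring window. A sketch of the setup path from msm_ringbuffer_new() above (the GPU-side iova is fetched later, per the adreno_gpu.c hit further down):

	static int ring_init(struct msm_gpu *gpu, struct msm_ringbuffer *ring,
			     int size)
	{
		ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC);
		if (IS_ERR(ring->bo)) {
			int ret = PTR_ERR(ring->bo);

			ring->bo = NULL;
			return ret;
		}
		ring->start = msm_gem_vaddr_locked(ring->bo);	/* CPU view */
		return 0;
	}
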
/linux-4.4.14/net/can/
Dbcm.c168 struct bcm_sock *bo = bcm_sk(sk); in bcm_proc_show() local
173 seq_printf(m, " / bo %pK", bo); in bcm_proc_show()
174 seq_printf(m, " / dropped %lu", bo->dropped_usr_msgs); in bcm_proc_show()
175 seq_printf(m, " / bound %s", bcm_proc_getifname(ifname, bo->ifindex)); in bcm_proc_show()
178 list_for_each_entry(op, &bo->rx_ops, list) { in bcm_proc_show()
209 list_for_each_entry(op, &bo->tx_ops, list) { in bcm_proc_show()
345 struct bcm_sock *bo = bcm_sk(sk); in bcm_send_to_user() local
349 bo->dropped_usr_msgs++; in bcm_send_to_user()
836 struct bcm_sock *bo = bcm_sk(sk); in bcm_tx_setup() local
850 op = bcm_find_op(&bo->tx_ops, msg_head->can_id, ifindex); in bcm_tx_setup()
[all …]
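
Another non-GPU bo: in CAN's broadcast manager, bo is the per-socket bcm_sock, whose tx_ops/rx_ops lists hold the periodic-send and receive-filter jobs. The lookup bcm_tx_setup() does via bcm_find_op() amounts to the following sketch (the real helper is shared with the rx side):

	static struct bcm_op *find_tx_op(struct sock *sk, canid_t can_id,
					 int ifindex)
	{
		struct bcm_sock *bo = bcm_sk(sk);
		struct bcm_op *op;

		list_for_each_entry(op, &bo->tx_ops, list)
			if (op->can_id == can_id && op->ifindex == ifindex)
				return op;
		return NULL;
	}
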
/linux-4.4.14/drivers/crypto/vmx/
Dppc-xlate.pl106 my $bo = $f=~/[\+\-]/ ? 16+9 : 16; # optional "to be taken" hint
107 " bc $bo,0,".shift;
111 my $bo = $f=~/\-/ ? 12+2 : 12; # optional "not to be taken" hint
113 " .long ".sprintf "0x%x",19<<26|$bo<<21|16<<1 :
114 " bclr $bo,0";
118 my $bo = $f=~/\-/ ? 4+2 : 4; # optional "not to be taken" hint
120 " .long ".sprintf "0x%x",19<<26|$bo<<21|2<<16|16<<1 :
121 " bclr $bo,2";
125 my $bo = $f=~/-/ ? 12+2 : 12; # optional "not to be taken" hint
127 " .long ".sprintf "0x%X",19<<26|$bo<<21|2<<16|16<<1 :
[all …]
/linux-4.4.14/include/trace/events/
Dhost1x.h84 TP_PROTO(const char *name, struct host1x_bo *bo,
87 TP_ARGS(name, bo, words, offset, cmdbuf),
91 __field(struct host1x_bo *, bo)
105 __entry->bo = bo;
111 __entry->name, __entry->bo,
226 TP_PROTO(struct host1x_bo *bo, u32 offset, u32 syncpt_id, u32 thresh,
229 TP_ARGS(bo, offset, syncpt_id, thresh, min),
232 __field(struct host1x_bo *, bo)
240 __entry->bo = bo;
248 __entry->bo, __entry->offset,
/linux-4.4.14/drivers/gpu/drm/vc4/
Dvc4_bo.c35 struct vc4_bo *bo = NULL; in vc4_dumb_create() local
44 bo = vc4_bo_create(dev, roundup(args->size, PAGE_SIZE)); in vc4_dumb_create()
45 if (!bo) in vc4_dumb_create()
48 ret = drm_gem_handle_create(file_priv, &bo->base.base, &args->handle); in vc4_dumb_create()
49 drm_gem_object_unreference_unlocked(&bo->base.base); in vc4_dumb_create()
Dvc4_drv.h33 to_vc4_bo(struct drm_gem_object *bo) in to_vc4_bo() argument
35 return (struct vc4_bo *)bo; in to_vc4_bo()
Dvc4_plane.c151 struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0); in vc4_plane_mode_set() local
211 vc4_dlist_write(vc4_state, bo->paddr + offset); in vc4_plane_mode_set()
/linux-4.4.14/drivers/gpu/drm/omapdrm/
Domap_fbdev.c38 struct drm_gem_object *bo; member
55 omap_gem_roll(fbdev->bo, fbi->var.yoffset * npages); in pan_worker()
143 fbdev->bo = omap_gem_new(dev, gsize, OMAP_BO_SCANOUT | OMAP_BO_WC); in omap_fbdev_create()
144 if (!fbdev->bo) { in omap_fbdev_create()
150 fb = omap_framebuffer_init(dev, &mode_cmd, &fbdev->bo); in omap_fbdev_create()
156 drm_gem_object_unreference(fbdev->bo); in omap_fbdev_create()
169 ret = omap_gem_get_paddr(fbdev->bo, &paddr, true); in omap_fbdev_create()
202 fbi->screen_base = omap_gem_vaddr(fbdev->bo); in omap_fbdev_create()
203 fbi->screen_size = fbdev->bo->size; in omap_fbdev_create()
205 fbi->fix.smem_len = fbdev->bo->size; in omap_fbdev_create()
[all …]
Domap_fb.c79 struct drm_gem_object *bo; member
102 omap_fb->planes[0].bo, handle); in omap_framebuffer_create_handle()
116 if (plane->bo) in omap_framebuffer_destroy()
117 drm_gem_object_unreference_unlocked(plane->bo); in omap_framebuffer_destroy()
170 if (omap_gem_flags(plane->bo) & OMAP_BO_TILED) { in omap_framebuffer_update_scanout()
208 omap_gem_rotated_paddr(plane->bo, orient, x, y, &info->paddr); in omap_framebuffer_update_scanout()
210 info->screen_width = omap_gem_tiled_stride(plane->bo, orient); in omap_framebuffer_update_scanout()
238 WARN_ON(!(omap_gem_flags(plane->bo) & OMAP_BO_TILED)); in omap_framebuffer_update_scanout()
239 omap_gem_rotated_paddr(plane->bo, orient, in omap_framebuffer_update_scanout()
265 ret = omap_gem_get_paddr(plane->bo, &plane->paddr, true); in omap_framebuffer_pin()
[all …]
/linux-4.4.14/drivers/gpu/drm/nouveau/nvkm/engine/gr/
Dctxgf108.c743 u32 bo = 0; in gf108_grctx_generate_attrib() local
744 u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total; in gf108_grctx_generate_attrib()
758 mmio_skip(info, o + 0x20, (t << 28) | (b << 16) | ++bo); in gf108_grctx_generate_attrib()
759 mmio_wr32(info, o + 0x20, (t << 28) | (b << 16) | --bo); in gf108_grctx_generate_attrib()
760 bo += grctx->attrib_nr_max; in gf108_grctx_generate_attrib()
Dctxgf117.c195 u32 bo = 0; in gf117_grctx_generate_attrib() local
196 u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total; in gf117_grctx_generate_attrib()
212 mmio_skip(info, o + 0xc0, (t << 28) | (b << 16) | ++bo); in gf117_grctx_generate_attrib()
213 mmio_wr32(info, o + 0xc0, (t << 28) | (b << 16) | --bo); in gf117_grctx_generate_attrib()
214 bo += grctx->attrib_nr_max * gr->ppc_tpc_nr[gpc][ppc]; in gf117_grctx_generate_attrib()
Dctxgm107.c907 u32 bo = 0; in gm107_grctx_generate_attrib() local
908 u32 ao = bo + grctx->attrib_nr_max * gr->tpc_total; in gm107_grctx_generate_attrib()
924 mmio_wr32(info, o + 0xf4, bo); in gm107_grctx_generate_attrib()
925 bo += grctx->attrib_nr_max * gr->ppc_tpc_nr[gpc][ppc]; in gm107_grctx_generate_attrib()
Dctxgf100.c1064 u32 bo = 0; in gf100_grctx_generate_attrib() local
1073 mmio_skip(info, o, (attrib << 16) | ++bo); in gf100_grctx_generate_attrib()
1074 mmio_wr32(info, o, (attrib << 16) | --bo); in gf100_grctx_generate_attrib()
1075 bo += grctx->attrib_nr_max; in gf100_grctx_generate_attrib()
/linux-4.4.14/drivers/gpu/host1x/hw/
Dchannel_hw.c32 static void trace_write_gather(struct host1x_cdma *cdma, struct host1x_bo *bo, in trace_write_gather() argument
39 mem = host1x_bo_mmap(bo); in trace_write_gather()
51 trace_host1x_cdma_push_gather(dev_name(dev), bo, in trace_write_gather()
56 host1x_bo_munmap(bo, mem); in trace_write_gather()
69 trace_write_gather(cdma, g->bo, g->offset, op1 & 0xffff); in submit_gathers()
Ddebug_hw.c159 mapped = host1x_bo_mmap(g->bo); in show_channel_gathers()
173 host1x_bo_munmap(g->bo, mapped); in show_channel_gathers()
/linux-4.4.14/kernel/trace/
Dtrace_probe.c473 unsigned long bw, bo; in __parse_bitfield_probe_arg() local
492 bo = simple_strtoul(bf, &tail, 0); in __parse_bitfield_probe_arg()
497 bprm->hi_shift = BYTES_TO_BITS(t->size) - (bw + bo); in __parse_bitfield_probe_arg()
498 bprm->low_shift = bprm->hi_shift + bo; in __parse_bitfield_probe_arg()
500 return (BYTES_TO_BITS(t->size) < (bw + bo)) ? -EINVAL : 0; in __parse_bitfield_probe_arg()
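
Here bo is a bit offset: __parse_bitfield_probe_arg() turns a bitfield spec's width and offset into a shift pair that first discards the bits above the field, then right-aligns what remains. A standalone check of that arithmetic (32-bit container, width 4 at bit offset 8 from the LSB):

	#include <stdio.h>
	#include <stdint.h>

	int main(void)
	{
		uint32_t val = 0xabcd1234;
		unsigned int bw = 4, bo = 8;	/* bits [11:8] -> 0x2 */
		unsigned int hi_shift = 32 - (bw + bo);
		unsigned int low_shift = hi_shift + bo;

		/* (val << hi_shift) drops the bits above the field,
		 * >> low_shift right-aligns what is left */
		printf("0x%x\n", (val << hi_shift) >> low_shift);
		return 0;
	}
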
/linux-4.4.14/net/mac802154/
Dtrace.h31 #define BOOL_TO_STR(bo) (bo) ? "true" : "false" argument
/linux-4.4.14/drivers/usb/wusbcore/
Dcrypto.c128 u8 *bo = _bo; in bytewise_xor() local
132 bo[itr] = bi1[itr] ^ bi2[itr]; in bytewise_xor()
/linux-4.4.14/drivers/gpu/drm/nouveau/dispnv04/
Doverlay.c140 nvif_wr32(dev, NV_PVIDEO_OFFSET_BUFF(flip), nv_fb->nvbo->bo.offset); in nv10_update_plane()
160 nv_fb->nvbo->bo.offset + fb->offsets[1]); in nv10_update_plane()
389 nv_fb->nvbo->bo.offset); in nv04_update_plane()
Dcrtc.c852 nv_crtc->fb.offset = fb->nvbo->bo.offset; in nv04_crtc_do_mode_set_base()
1020 nv_crtc->cursor.offset = nv_crtc->cursor.nvbo->bo.offset; in nv04_crtc_cursor_set()
/linux-4.4.14/net/ieee802154/
Dtrace.h37 #define BOOL_TO_STR(bo) (bo) ? "true" : "false" argument
/linux-4.4.14/arch/mips/include/asm/xtalk/
Dxwidget.h245 unsigned bo:1; member
/linux-4.4.14/drivers/gpu/drm/gma500/
Dpsb_intel_drv.h97 size_t(*bo_offset) (struct drm_device *dev, void *bo);
/linux-4.4.14/drivers/gpu/ipu-v3/
Dipu-cpmem.c295 int bpp = 0, npb = 0, ro, go, bo, to; in ipu_cpmem_set_format_rgb() local
299 bo = rgb->bits_per_pixel - rgb->blue.length - rgb->blue.offset; in ipu_cpmem_set_format_rgb()
307 ipu_ch_param_write_field(ch, IPU_FIELD_OFS2, bo); in ipu_cpmem_set_format_rgb()
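
The bo here is the blue component's offset as the IPU wants it, counted down from the MSB, whereas fb-style rgb descriptors count up from the LSB; the subtraction flips the convention. A worked check for RGB565 (16 bpp, blue in the low 5 bits):

	#include <stdio.h>

	int main(void)
	{
		int bpp = 16;			/* RGB565 */
		int blue_offset = 0, blue_length = 5;
		/* bits sitting above the blue field: */
		int bo = bpp - blue_length - blue_offset;

		printf("bo = %d\n", bo);	/* 11 */
		return 0;
	}
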
/linux-4.4.14/drivers/media/usb/dvb-usb/
Dtechnisat-usb2.c407 u8 bo = offset & 0xff; in technisat_usb2_eeprom_lrc_read() local
411 .buf = &bo, in technisat_usb2_eeprom_lrc_read()
/linux-4.4.14/arch/powerpc/lib/
Dsstep.c65 unsigned int bo = (instr >> 21) & 0x1f; in branch_taken() local
68 if ((bo & 4) == 0) { in branch_taken()
71 if (((bo >> 1) & 1) ^ (regs->ctr == 0)) in branch_taken()
74 if ((bo & 0x10) == 0) { in branch_taken()
77 if (((regs->ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1)) in branch_taken()
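
sstep.c decodes the PowerPC BO field by hand: bit 2 clear means "decrement CTR and compare it against bit 1", bit 4 clear means "test CR bit bi against bit 3". A standalone restatement of branch_taken() above that can be run to sanity-check an encoding (BO=16 is bdnz, BO=0x14 is branch-always):

	#include <stdio.h>
	#include <stdint.h>

	static int branch_taken(unsigned int bo, unsigned int bi,
				uint64_t *ctr, uint32_t ccr)
	{
		if ((bo & 4) == 0) {		/* CTR is in play */
			--*ctr;
			if (((bo >> 1) & 1) ^ (*ctr == 0))
				return 0;
		}
		if ((bo & 0x10) == 0 &&		/* CR bit is in play */
		    ((ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1))
			return 0;
		return 1;
	}

	int main(void)
	{
		uint64_t ctr = 2;

		printf("%d", branch_taken(16, 0, &ctr, 0));	/* 1, ctr->1 */
		printf("%d", branch_taken(16, 0, &ctr, 0));	/* 0, ctr->0 */
		printf("%d\n", branch_taken(0x14, 0, &ctr, 0));	/* 1 */
		return 0;
	}
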
/linux-4.4.14/drivers/gpu/drm/msm/adreno/
Dadreno_gpu.c60 ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova); in adreno_hw_init()
/linux-4.4.14/arch/mips/include/asm/sn/sn0/
Dhubio.h882 bo: 1, /* 31: barrier op set in xtalk rqst*/ member
/linux-4.4.14/arch/powerpc/xmon/
Dppc-opc.c1559 #define BBO(op, bo, aa, lk) (B ((op), (aa), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21)) argument
1573 #define BBOCB(op, bo, cb, aa, lk) \ argument
1574 (BBO ((op), (bo), (aa), (lk)) | ((((unsigned long)(cb)) & 0x3) << 16))
1752 #define XLO(op, bo, xop, lk) \ argument
1753 (XLLK ((op), (xop), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21))
1763 #define XLOCB(op, bo, cb, xop, lk) \ argument
1764 (XLO ((op), (bo), (xop), (lk)) | ((((unsigned long)(cb)) & 3) << 16))
/linux-4.4.14/drivers/usb/host/
Doxu210hp-hcd.c3691 static const char * const bo[] = { in oxu_verify_id() local
3706 bo[(id & OXU_BO_MASK) >> OXU_BO_SHIFT], in oxu_verify_id()
/linux-4.4.14/net/wireless/
Dtrace.h188 #define BOOL_TO_STR(bo) (bo) ? "true" : "false" argument