
Searched refs:bo (Results 1 – 128 of 128) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/radeon/
radeon_object.c
43 static void radeon_bo_clear_surface_reg(struct radeon_bo *bo);
50 static void radeon_update_memory_usage(struct radeon_bo *bo, in radeon_update_memory_usage() argument
53 struct radeon_device *rdev = bo->rdev; in radeon_update_memory_usage()
54 u64 size = (u64)bo->tbo.num_pages << PAGE_SHIFT; in radeon_update_memory_usage()
74 struct radeon_bo *bo; in radeon_ttm_bo_destroy() local
76 bo = container_of(tbo, struct radeon_bo, tbo); in radeon_ttm_bo_destroy()
78 radeon_update_memory_usage(bo, bo->tbo.mem.mem_type, -1); in radeon_ttm_bo_destroy()
80 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
81 list_del_init(&bo->list); in radeon_ttm_bo_destroy()
82 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
[all …]
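
A note on the idiom above: radeon_ttm_bo_destroy() recovers the driver object from the embedded TTM object via container_of(), and the same embed-and-recover pattern repeats in the qxl, mgag200, ast, cirrus, bochs and nouveau hits below. A minimal sketch, with demo_bo and its members purely illustrative, not from the source:

#include <linux/kernel.h>	/* container_of() */
#include <linux/list.h>
#include <linux/slab.h>
#include <drm/ttm/ttm_bo_api.h>

struct demo_bo {
	struct ttm_buffer_object tbo;	/* embedded TTM object */
	struct list_head list;		/* driver-private bookkeeping */
};

/* TTM calls the destroy hook with the embedded member; container_of()
 * walks back to the enclosing wrapper so driver state can be torn down. */
static void demo_bo_ttm_destroy(struct ttm_buffer_object *tbo)
{
	struct demo_bo *bo = container_of(tbo, struct demo_bo, tbo);

	list_del_init(&bo->list);
	kfree(bo);
}
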
radeon_prime.c
34 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_get_sg_table() local
35 int npages = bo->tbo.num_pages; in radeon_gem_prime_get_sg_table()
37 return drm_prime_pages_to_sg(bo->tbo.ttm->pages, npages); in radeon_gem_prime_get_sg_table()
42 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vmap() local
45 ret = ttm_bo_kmap(&bo->tbo, 0, bo->tbo.num_pages, in radeon_gem_prime_vmap()
46 &bo->dma_buf_vmap); in radeon_gem_prime_vmap()
50 return bo->dma_buf_vmap.virtual; in radeon_gem_prime_vmap()
55 struct radeon_bo *bo = gem_to_radeon_bo(obj); in radeon_gem_prime_vunmap() local
57 ttm_bo_kunmap(&bo->dma_buf_vmap); in radeon_gem_prime_vunmap()
66 struct radeon_bo *bo; in radeon_gem_prime_import_sg_table() local
[all …]
radeon_object.h
64 static inline int radeon_bo_reserve(struct radeon_bo *bo, bool no_intr) in radeon_bo_reserve() argument
68 r = ttm_bo_reserve(&bo->tbo, !no_intr, false, false, NULL); in radeon_bo_reserve()
71 dev_err(bo->rdev->dev, "%p reserve failed\n", bo); in radeon_bo_reserve()
77 static inline void radeon_bo_unreserve(struct radeon_bo *bo) in radeon_bo_unreserve() argument
79 ttm_bo_unreserve(&bo->tbo); in radeon_bo_unreserve()
91 static inline u64 radeon_bo_gpu_offset(struct radeon_bo *bo) in radeon_bo_gpu_offset() argument
93 return bo->tbo.offset; in radeon_bo_gpu_offset()
96 static inline unsigned long radeon_bo_size(struct radeon_bo *bo) in radeon_bo_size() argument
98 return bo->tbo.num_pages << PAGE_SHIFT; in radeon_bo_size()
101 static inline unsigned radeon_bo_ngpu_pages(struct radeon_bo *bo) in radeon_bo_ngpu_pages() argument
[all …]
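
Hedged usage sketch for the inline helpers above, mirroring the reserve → kmap → unreserve sequence that radeon_sa.c performs further down this listing (demo_map_bo is an illustrative name, not from the source):

static int demo_map_bo(struct radeon_bo *bo, void **ptr)
{
	int r;

	r = radeon_bo_reserve(bo, false);	/* false = interruptible */
	if (r)
		return r;
	r = radeon_bo_kmap(bo, ptr);		/* CPU mapping of the buffer */
	radeon_bo_unreserve(bo);
	return r;
}
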
radeon_mn.c
73 struct radeon_bo *bo, *next_bo; in radeon_mn_destroy() local
82 list_for_each_entry_safe(bo, next_bo, &node->bos, mn_list) { in radeon_mn_destroy()
83 bo->mn = NULL; in radeon_mn_destroy()
84 list_del_init(&bo->mn_list); in radeon_mn_destroy()
137 struct radeon_bo *bo; in radeon_mn_invalidate_range_start() local
143 list_for_each_entry(bo, &node->bos, mn_list) { in radeon_mn_invalidate_range_start()
145 if (!bo->tbo.ttm || bo->tbo.ttm->state != tt_bound) in radeon_mn_invalidate_range_start()
148 r = radeon_bo_reserve(bo, true); in radeon_mn_invalidate_range_start()
154 r = reservation_object_wait_timeout_rcu(bo->tbo.resv, in radeon_mn_invalidate_range_start()
159 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_CPU); in radeon_mn_invalidate_range_start()
[all …]
radeon_vm.c
144 list[0].tv.bo = &vm->page_directory->tbo; in radeon_vm_get_bos
150 if (!vm->page_tables[i].bo) in radeon_vm_get_bos()
153 list[idx].robj = vm->page_tables[i].bo; in radeon_vm_get_bos()
156 list[idx].tv.bo = &list[idx].robj->tbo; in radeon_vm_get_bos()
294 struct radeon_bo *bo) in radeon_vm_bo_find() argument
298 list_for_each_entry(bo_va, &bo->va, bo_list) { in radeon_vm_bo_find()
321 struct radeon_bo *bo) in radeon_vm_bo_add() argument
330 bo_va->bo = bo; in radeon_vm_bo_add()
340 list_add_tail(&bo_va->bo_list, &bo->va); in radeon_vm_bo_add()
389 struct radeon_bo *bo) in radeon_vm_clear_bo() argument
[all …]
radeon_ttm.c
178 static void radeon_evict_flags(struct ttm_buffer_object *bo, in radeon_evict_flags() argument
189 if (!radeon_ttm_bo_is_radeon_bo(bo)) { in radeon_evict_flags()
196 rbo = container_of(bo, struct radeon_bo, tbo); in radeon_evict_flags()
197 switch (bo->mem.mem_type) { in radeon_evict_flags()
202 bo->mem.start < (rbo->rdev->mc.visible_vram_size >> PAGE_SHIFT)) { in radeon_evict_flags()
234 static int radeon_verify_access(struct ttm_buffer_object *bo, struct file *filp) in radeon_verify_access() argument
236 struct radeon_bo *rbo = container_of(bo, struct radeon_bo, tbo); in radeon_verify_access()
238 if (radeon_ttm_tt_has_userptr(bo->ttm)) in radeon_verify_access()
243 static void radeon_move_null(struct ttm_buffer_object *bo, in radeon_move_null() argument
246 struct ttm_mem_reg *old_mem = &bo->mem; in radeon_move_null()
[all …]
radeon_sa.c
57 sa_manager->bo = NULL; in radeon_sa_bo_manager_init()
68 domain, flags, NULL, NULL, &sa_manager->bo); in radeon_sa_bo_manager_init()
92 radeon_bo_unref(&sa_manager->bo); in radeon_sa_bo_manager_fini()
101 if (sa_manager->bo == NULL) { in radeon_sa_bo_manager_start()
107 r = radeon_bo_reserve(sa_manager->bo, false); in radeon_sa_bo_manager_start()
112 r = radeon_bo_pin(sa_manager->bo, sa_manager->domain, &sa_manager->gpu_addr); in radeon_sa_bo_manager_start()
114 radeon_bo_unreserve(sa_manager->bo); in radeon_sa_bo_manager_start()
118 r = radeon_bo_kmap(sa_manager->bo, &sa_manager->cpu_ptr); in radeon_sa_bo_manager_start()
119 radeon_bo_unreserve(sa_manager->bo); in radeon_sa_bo_manager_start()
128 if (sa_manager->bo == NULL) { in radeon_sa_bo_manager_suspend()
[all …]
radeon_trace.h
15 TP_PROTO(struct radeon_bo *bo),
16 TP_ARGS(bo),
18 __field(struct radeon_bo *, bo)
23 __entry->bo = bo;
24 __entry->pages = bo->tbo.num_pages;
26 TP_printk("bo=%p, pages=%u", __entry->bo, __entry->pages)
radeon_kfd.c
38 struct radeon_bo *bo; member
215 RADEON_GEM_GTT_WC, NULL, NULL, &(*mem)->bo); in alloc_gtt_mem()
223 r = radeon_bo_reserve((*mem)->bo, true); in alloc_gtt_mem()
229 r = radeon_bo_pin((*mem)->bo, RADEON_GEM_DOMAIN_GTT, in alloc_gtt_mem()
237 r = radeon_bo_kmap((*mem)->bo, &(*mem)->cpu_ptr); in alloc_gtt_mem()
245 radeon_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
250 radeon_bo_unpin((*mem)->bo); in alloc_gtt_mem()
252 radeon_bo_unreserve((*mem)->bo); in alloc_gtt_mem()
254 radeon_bo_unref(&(*mem)->bo); in alloc_gtt_mem()
265 radeon_bo_reserve(mem->bo, true); in free_gtt_mem()
[all …]
radeon_gem.c
287 struct radeon_bo *bo; in radeon_gem_userptr_ioctl() local
322 bo = gem_to_radeon_bo(gobj); in radeon_gem_userptr_ioctl()
323 r = radeon_ttm_tt_set_userptr(bo->tbo.ttm, args->addr, args->flags); in radeon_gem_userptr_ioctl()
328 r = radeon_mn_register(bo, args->addr); in radeon_gem_userptr_ioctl()
335 r = radeon_bo_reserve(bo, true); in radeon_gem_userptr_ioctl()
341 radeon_ttm_placement_from_domain(bo, RADEON_GEM_DOMAIN_GTT); in radeon_gem_userptr_ioctl()
342 r = ttm_bo_validate(&bo->tbo, &bo->placement, true, false); in radeon_gem_userptr_ioctl()
343 radeon_bo_unreserve(bo); in radeon_gem_userptr_ioctl()
546 tv.bo = &bo_va->bo->tbo; in radeon_gem_va_update_vm()
559 domain = radeon_mem_type_to_domain(entry->bo->mem.mem_type); in radeon_gem_va_update_vm()
[all …]
radeon_cs.c
167 p->relocs[i].tv.bo = &p->relocs[i].robj->tbo; in radeon_cs_parser_relocs()
423 struct radeon_bo *bo = parser->relocs[i].robj; in radeon_cs_parser_fini() local
424 if (bo == NULL) in radeon_cs_parser_fini()
427 drm_gem_object_unreference_unlocked(&bo->gem_base); in radeon_cs_parser_fini()
499 &rdev->ring_tmp_bo.bo->tbo.mem); in radeon_bo_vm_update_pte()
504 struct radeon_bo *bo; in radeon_bo_vm_update_pte() local
506 bo = p->relocs[i].robj; in radeon_bo_vm_update_pte()
507 bo_va = radeon_vm_bo_find(vm, bo); in radeon_bo_vm_update_pte()
509 dev_err(rdev->dev, "bo %p not in vm %p\n", bo, vm); in radeon_bo_vm_update_pte()
513 r = radeon_vm_bo_update(rdev, bo_va, &bo->tbo.mem); in radeon_bo_vm_update_pte()
radeon_uvd.c
421 static int radeon_uvd_cs_msg(struct radeon_cs_parser *p, struct radeon_bo *bo, in radeon_uvd_cs_msg() argument
436 f = reservation_object_get_excl(bo->tbo.resv); in radeon_uvd_cs_msg()
445 r = radeon_bo_kmap(bo, &ptr); in radeon_uvd_cs_msg()
467 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
493 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
515 radeon_bo_kunmap(bo); in radeon_uvd_cs_msg()
radeon_kms.c
648 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); in radeon_driver_open_kms()
658 rdev->ring_tmp_bo.bo); in radeon_driver_open_kms()
697 r = radeon_bo_reserve(rdev->ring_tmp_bo.bo, false); in radeon_driver_postclose_kms()
701 radeon_bo_unreserve(rdev->ring_tmp_bo.bo); in radeon_driver_postclose_kms()
radeon.h
436 struct radeon_bo *bo; member
481 struct radeon_bo *bo; member
542 struct radeon_bo *bo; member
918 struct radeon_bo *bo; member
1789 int radeon_mn_register(struct radeon_bo *bo, unsigned long addr);
1790 void radeon_mn_unregister(struct radeon_bo *bo);
1792 static inline int radeon_mn_register(struct radeon_bo *bo, unsigned long addr) in radeon_mn_register() argument
1796 static inline void radeon_mn_unregister(struct radeon_bo *bo) {} in radeon_mn_unregister() argument
2986 extern bool radeon_ttm_bo_is_radeon_bo(struct ttm_buffer_object *bo);
3029 struct radeon_bo *bo);
[all …]
radeon_pm.c
146 struct radeon_bo *bo, *n; in radeon_unmap_vram_bos() local
151 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_unmap_vram_bos()
152 if (bo->tbo.mem.mem_type == TTM_PL_VRAM) in radeon_unmap_vram_bos()
153 ttm_bo_unmap_virtual(&bo->tbo); in radeon_unmap_vram_bos()
radeon_device.c
219 if (rdev->surface_regs[i].bo) in radeon_surface_init()
220 radeon_bo_get_surface_reg(rdev->surface_regs[i].bo); in radeon_surface_init()
/linux-4.1.27/drivers/gpu/drm/qxl/
qxl_object.c
32 struct qxl_bo *bo; in qxl_ttm_bo_destroy() local
35 bo = container_of(tbo, struct qxl_bo, tbo); in qxl_ttm_bo_destroy()
36 qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_ttm_bo_destroy()
38 qxl_surface_evict(qdev, bo, false); in qxl_ttm_bo_destroy()
40 list_del_init(&bo->list); in qxl_ttm_bo_destroy()
42 drm_gem_object_release(&bo->gem_base); in qxl_ttm_bo_destroy()
43 kfree(bo); in qxl_ttm_bo_destroy()
46 bool qxl_ttm_bo_is_qxl_bo(struct ttm_buffer_object *bo) in qxl_ttm_bo_is_qxl_bo() argument
48 if (bo->destroy == &qxl_ttm_bo_destroy) in qxl_ttm_bo_is_qxl_bo()
83 struct qxl_bo *bo; in qxl_bo_create() local
[all …]
qxl_object.h
30 static inline int qxl_bo_reserve(struct qxl_bo *bo, bool no_wait) in qxl_bo_reserve() argument
34 r = ttm_bo_reserve(&bo->tbo, true, no_wait, false, NULL); in qxl_bo_reserve()
37 struct qxl_device *qdev = (struct qxl_device *)bo->gem_base.dev->dev_private; in qxl_bo_reserve()
38 dev_err(qdev->dev, "%p reserve failed\n", bo); in qxl_bo_reserve()
45 static inline void qxl_bo_unreserve(struct qxl_bo *bo) in qxl_bo_unreserve() argument
47 ttm_bo_unreserve(&bo->tbo); in qxl_bo_unreserve()
50 static inline u64 qxl_bo_gpu_offset(struct qxl_bo *bo) in qxl_bo_gpu_offset() argument
52 return bo->tbo.offset; in qxl_bo_gpu_offset()
55 static inline unsigned long qxl_bo_size(struct qxl_bo *bo) in qxl_bo_size() argument
57 return bo->tbo.num_pages << PAGE_SHIFT; in qxl_bo_size()
[all …]
qxl_release.c
166 struct qxl_bo *bo; in qxl_release_free_list() local
170 bo = to_qxl_bo(entry->tv.bo); in qxl_release_free_list()
171 qxl_bo_unref(&bo); in qxl_release_free_list()
204 struct qxl_bo **bo) in qxl_release_bo_alloc() argument
210 bo); in qxl_release_bo_alloc()
214 int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo) in qxl_release_list_add() argument
219 if (entry->tv.bo == &bo->tbo) in qxl_release_list_add()
227 qxl_bo_ref(bo); in qxl_release_list_add()
228 entry->tv.bo = &bo->tbo; in qxl_release_list_add()
234 static int qxl_release_validate_bo(struct qxl_bo *bo) in qxl_release_validate_bo() argument
[all …]
qxl_ttm.c
111 struct ttm_buffer_object *bo; in qxl_ttm_fault() local
114 bo = (struct ttm_buffer_object *)vma->vm_private_data; in qxl_ttm_fault()
115 if (bo == NULL) in qxl_ttm_fault()
187 static void qxl_evict_flags(struct ttm_buffer_object *bo, in qxl_evict_flags() argument
197 if (!qxl_ttm_bo_is_qxl_bo(bo)) { in qxl_evict_flags()
204 qbo = container_of(bo, struct qxl_bo, tbo); in qxl_evict_flags()
209 static int qxl_verify_access(struct ttm_buffer_object *bo, struct file *filp) in qxl_verify_access() argument
211 struct qxl_bo *qbo = to_qxl_bo(bo); in qxl_verify_access()
337 static void qxl_move_null(struct ttm_buffer_object *bo, in qxl_move_null() argument
340 struct ttm_mem_reg *old_mem = &bo->mem; in qxl_move_null()
[all …]
qxl_display.c
228 struct qxl_bo *bo = gem_to_qxl_bo(qfb_src->obj); in qxl_crtc_page_flip() local
242 bo->is_primary = true; in qxl_crtc_page_flip()
244 ret = qxl_bo_reserve(bo, false); in qxl_crtc_page_flip()
248 qxl_draw_dirty_fb(qdev, qfb_src, bo, 0, 0, in qxl_crtc_page_flip()
260 qxl_bo_unreserve(bo); in qxl_crtc_page_flip()
604 struct qxl_bo *bo, *old_bo = NULL; in qxl_crtc_mode_set() local
619 bo = gem_to_qxl_bo(qfb->obj); in qxl_crtc_mode_set()
626 if (bo->is_primary == false) in qxl_crtc_mode_set()
629 if (bo->surf.stride * bo->surf.height > qdev->vram_size) { in qxl_crtc_mode_set()
634 ret = qxl_bo_reserve(bo, false); in qxl_crtc_mode_set()
[all …]
qxl_debugfs.c
58 struct qxl_bo *bo; in qxl_debugfs_buffers_info() local
60 list_for_each_entry(bo, &qdev->gem.objects, list) { in qxl_debugfs_buffers_info()
65 fobj = rcu_dereference(bo->tbo.resv->fence); in qxl_debugfs_buffers_info()
70 (unsigned long)bo->gem_base.size, in qxl_debugfs_buffers_info()
71 bo->pin_count, rel); in qxl_debugfs_buffers_info()
qxl_image.c
45 ret = qxl_alloc_bo_reserved(qdev, release, chunk_size, &chunk->bo); in qxl_allocate_chunk()
70 ret = qxl_alloc_bo_reserved(qdev, release, sizeof(struct qxl_image), &image->bo); in qxl_image_alloc_objects()
78 qxl_bo_unref(&image->bo); in qxl_image_alloc_objects()
91 qxl_bo_unref(&chunk->bo); in qxl_image_free_objects()
95 qxl_bo_unref(&dimage->bo); in qxl_image_free_objects()
122 chunk_bo = drv_chunk->bo; in qxl_image_init_helper()
189 image_bo = dimage->bo; in qxl_image_init_helper()
qxl_cmd.c
185 cmd.data = qxl_bo_physical_address(qdev, to_qxl_bo(entry->tv.bo), release->release_offset); in qxl_push_command_ring_release()
198 cmd.data = qxl_bo_physical_address(qdev, to_qxl_bo(entry->tv.bo), release->release_offset); in qxl_push_cursor_ring_release()
261 struct qxl_bo *bo; in qxl_alloc_bo_reserved() local
265 false, QXL_GEM_DOMAIN_VRAM, NULL, &bo); in qxl_alloc_bo_reserved()
270 ret = qxl_release_list_add(release, bo); in qxl_alloc_bo_reserved()
274 *_bo = bo; in qxl_alloc_bo_reserved()
277 qxl_bo_unref(&bo); in qxl_alloc_bo_reserved()
380 unsigned offset, struct qxl_bo *bo) in qxl_io_create_primary() argument
387 create->format = bo->surf.format; in qxl_io_create_primary()
388 create->width = bo->surf.width; in qxl_io_create_primary()
[all …]
qxl_drv.h
204 struct qxl_bo *bo; member
208 struct qxl_bo *bo; member
371 qxl_bo_physical_address(struct qxl_device *qdev, struct qxl_bo *bo, in qxl_bo_physical_address() argument
374 int slot_id = bo->type == QXL_GEM_DOMAIN_VRAM ? qdev->main_mem_slot : qdev->surfaces_mem_slot; in qxl_bo_physical_address()
378 return slot->high_bits | (bo->tbo.offset + offset); in qxl_bo_physical_address()
426 int qxl_bo_kmap(struct qxl_bo *bo, void **ptr);
463 struct qxl_bo *bo);
482 int qxl_release_list_add(struct qxl_release *release, struct qxl_bo *bo);
512 struct qxl_bo *bo,
573 int qxl_bo_check_id(struct qxl_device *qdev, struct qxl_bo *bo);
qxl_draw.c
221 ptr = qxl_bo_kmap_atomic_page(qdev, dimage->bo, 0); in qxl_draw_opaque_fb()
225 qxl_bo_kunmap_atomic_page(qdev, dimage->bo, ptr); in qxl_draw_opaque_fb()
243 qxl_bo_physical_address(qdev, dimage->bo, 0); in qxl_draw_opaque_fb()
268 struct qxl_bo *bo, in qxl_draw_dirty_fb() argument
341 ret = qxl_bo_kmap(bo, (void **)&surface_base); in qxl_draw_dirty_fb()
348 qxl_bo_kunmap(bo); in qxl_draw_dirty_fb()
374 drawable->u.copy.src_bitmap = qxl_bo_physical_address(qdev, dimage->bo, 0); in qxl_draw_dirty_fb()
/linux-4.1.27/drivers/gpu/drm/tegra/
gem.c
23 static inline struct tegra_bo *host1x_to_tegra_bo(struct host1x_bo *bo) in host1x_to_tegra_bo() argument
25 return container_of(bo, struct tegra_bo, base); in host1x_to_tegra_bo()
28 static void tegra_bo_put(struct host1x_bo *bo) in tegra_bo_put() argument
30 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_put()
38 static dma_addr_t tegra_bo_pin(struct host1x_bo *bo, struct sg_table **sgt) in tegra_bo_pin() argument
40 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_pin()
45 static void tegra_bo_unpin(struct host1x_bo *bo, struct sg_table *sgt) in tegra_bo_unpin() argument
49 static void *tegra_bo_mmap(struct host1x_bo *bo) in tegra_bo_mmap() argument
51 struct tegra_bo *obj = host1x_to_tegra_bo(bo); in tegra_bo_mmap()
56 static void tegra_bo_munmap(struct host1x_bo *bo, void *addr) in tegra_bo_munmap() argument
[all …]
fb.c
66 struct tegra_bo *bo = fb->planes[i]; in tegra_fb_destroy() local
68 if (bo) { in tegra_fb_destroy()
69 if (bo->pages && bo->vaddr) in tegra_fb_destroy()
70 vunmap(bo->vaddr); in tegra_fb_destroy()
72 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fb_destroy()
208 struct tegra_bo *bo; in tegra_fbdev_probe() local
223 bo = tegra_bo_create(drm, size, 0); in tegra_fbdev_probe()
224 if (IS_ERR(bo)) in tegra_fbdev_probe()
225 return PTR_ERR(bo); in tegra_fbdev_probe()
230 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fbdev_probe()
[all …]
drm.c
265 struct tegra_bo *bo; in host1x_bo_lookup() local
275 bo = to_tegra_bo(gem); in host1x_bo_lookup()
276 return &bo->base; in host1x_bo_lookup()
307 dest->cmdbuf.bo = host1x_bo_lookup(drm, file, cmdbuf); in host1x_reloc_copy_from_user()
308 if (!dest->cmdbuf.bo) in host1x_reloc_copy_from_user()
311 dest->target.bo = host1x_bo_lookup(drm, file, target); in host1x_reloc_copy_from_user()
312 if (!dest->target.bo) in host1x_reloc_copy_from_user()
352 struct host1x_bo *bo; in tegra_drm_submit() local
359 bo = host1x_bo_lookup(drm, file, cmdbuf.handle); in tegra_drm_submit()
360 if (!bo) { in tegra_drm_submit()
[all …]
dc.c
584 struct tegra_bo *bo = tegra_fb_get_plane(fb, i); in tegra_plane_atomic_update() local
586 window.base[i] = bo->paddr + fb->offsets[i]; in tegra_plane_atomic_update()
705 struct tegra_bo *bo = tegra_fb_get_plane(plane->state->fb, 0); in tegra_cursor_atomic_update() local
737 value |= (bo->paddr >> 10) & 0x3fffff; in tegra_cursor_atomic_update()
741 value = (bo->paddr >> 32) & 0x3; in tegra_cursor_atomic_update()
951 struct tegra_bo *bo; in tegra_dc_finish_page_flip() local
960 bo = tegra_fb_get_plane(crtc->primary->fb, 0); in tegra_dc_finish_page_flip()
972 if (base == bo->paddr + crtc->primary->fb->offsets[0]) { in tegra_dc_finish_page_flip()
/linux-4.1.27/drivers/gpu/drm/ttm/
ttm_bo.c
85 static void ttm_bo_mem_space_debug(struct ttm_buffer_object *bo, in ttm_bo_mem_space_debug() argument
91 bo, bo->mem.num_pages, bo->mem.size >> 10, in ttm_bo_mem_space_debug()
92 bo->mem.size >> 20); in ttm_bo_mem_space_debug()
100 ttm_mem_type_debug(bo->bdev, mem_type); in ttm_bo_mem_space_debug()
138 struct ttm_buffer_object *bo = in ttm_bo_release_list() local
140 struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_release_list()
141 size_t acc_size = bo->acc_size; in ttm_bo_release_list()
143 BUG_ON(atomic_read(&bo->list_kref.refcount)); in ttm_bo_release_list()
144 BUG_ON(atomic_read(&bo->kref.refcount)); in ttm_bo_release_list()
145 BUG_ON(atomic_read(&bo->cpu_writers)); in ttm_bo_release_list()
[all …]
ttm_bo_vm.c
44 static int ttm_bo_vm_fault_idle(struct ttm_buffer_object *bo, in ttm_bo_vm_fault_idle() argument
50 if (likely(!test_bit(TTM_BO_PRIV_FLAG_MOVING, &bo->priv_flags))) in ttm_bo_vm_fault_idle()
56 ret = ttm_bo_wait(bo, false, false, true); in ttm_bo_vm_fault_idle()
70 (void) ttm_bo_wait(bo, false, true, false); in ttm_bo_vm_fault_idle()
77 ret = ttm_bo_wait(bo, false, true, false); in ttm_bo_vm_fault_idle()
88 struct ttm_buffer_object *bo = (struct ttm_buffer_object *) in ttm_bo_vm_fault() local
90 struct ttm_bo_device *bdev = bo->bdev; in ttm_bo_vm_fault()
101 &bdev->man[bo->mem.mem_type]; in ttm_bo_vm_fault()
110 ret = ttm_bo_reserve(bo, true, true, false, NULL); in ttm_bo_vm_fault()
118 (void) ttm_bo_wait_unreserved(bo); in ttm_bo_vm_fault()
[all …]
ttm_execbuf_util.c
39 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation_reverse() local
41 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation_reverse()
50 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_del_from_lru_locked() local
51 unsigned put_count = ttm_bo_del_from_lru(bo); in ttm_eu_del_from_lru_locked()
53 ttm_bo_list_ref_sub(bo, put_count, true); in ttm_eu_del_from_lru_locked()
67 glob = entry->bo->glob; in ttm_eu_backoff_reservation()
71 struct ttm_buffer_object *bo = entry->bo; in ttm_eu_backoff_reservation() local
73 ttm_bo_add_to_lru(bo); in ttm_eu_backoff_reservation()
74 __ttm_bo_unreserve(bo); in ttm_eu_backoff_reservation()
107 glob = entry->bo->glob; in ttm_eu_reserve_buffers()
[all …]
ttm_bo_util.c
42 void ttm_bo_free_old_node(struct ttm_buffer_object *bo) in ttm_bo_free_old_node() argument
44 ttm_bo_mem_put(bo, &bo->mem); in ttm_bo_free_old_node()
47 int ttm_bo_move_ttm(struct ttm_buffer_object *bo, in ttm_bo_move_ttm() argument
51 struct ttm_tt *ttm = bo->ttm; in ttm_bo_move_ttm()
52 struct ttm_mem_reg *old_mem = &bo->mem; in ttm_bo_move_ttm()
57 ttm_bo_free_old_node(bo); in ttm_bo_move_ttm()
104 struct ttm_buffer_object *bo; in ttm_mem_io_evict() local
109 bo = list_first_entry(&man->io_reserve_lru, in ttm_mem_io_evict()
112 list_del_init(&bo->io_reserve_lru); in ttm_mem_io_evict()
113 ttm_bo_unmap_virtual_locked(bo); in ttm_mem_io_evict()
[all …]
ttm_bo_manager.c
51 struct ttm_buffer_object *bo, in ttm_bo_man_get_node() argument
/linux-4.1.27/drivers/gpu/drm/mgag200/
mgag200_ttm.c
97 struct mgag200_bo *bo; in mgag200_bo_ttm_destroy() local
99 bo = container_of(tbo, struct mgag200_bo, bo); in mgag200_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in mgag200_bo_ttm_destroy()
102 kfree(bo); in mgag200_bo_ttm_destroy()
105 static bool mgag200_ttm_bo_is_mgag200_bo(struct ttm_buffer_object *bo) in mgag200_ttm_bo_is_mgag200_bo() argument
107 if (bo->destroy == &mgag200_bo_ttm_destroy) in mgag200_ttm_bo_is_mgag200_bo()
138 mgag200_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in mgag200_bo_evict_flags() argument
140 struct mgag200_bo *mgabo = mgag200_bo(bo); in mgag200_bo_evict_flags()
142 if (!mgag200_ttm_bo_is_mgag200_bo(bo)) in mgag200_bo_evict_flags()
149 static int mgag200_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in mgag200_bo_verify_access() argument
[all …]
mgag200_cursor.c
44 struct mgag200_bo *bo = NULL; in mga_crtc_cursor_set() local
119 bo = gem_to_mga_bo(obj); in mga_crtc_cursor_set()
120 ret = mgag200_bo_reserve(bo, true); in mga_crtc_cursor_set()
125 if (!bo->kmap.virtual) { in mga_crtc_cursor_set()
126 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_crtc_cursor_set()
136 this_colour = ioread32(bo->kmap.virtual + i); in mga_crtc_cursor_set()
189 ret = ttm_bo_kmap(&pixels_prev->bo, 0, in mga_crtc_cursor_set()
190 pixels_prev->bo.num_pages, in mga_crtc_cursor_set()
202 this_colour = ioread32(bo->kmap.virtual + 4*(col + 64*row)); in mga_crtc_cursor_set()
248 ttm_bo_kunmap(&bo->kmap); in mga_crtc_cursor_set()
[all …]
mgag200_drv.h
223 struct ttm_buffer_object bo; member
233 mgag200_bo(struct ttm_buffer_object *bo) in mgag200_bo() argument
235 return container_of(bo, struct mgag200_bo, bo); in mgag200_bo()
277 void mgag200_ttm_placement(struct mgag200_bo *bo, int domain);
279 static inline int mgag200_bo_reserve(struct mgag200_bo *bo, bool no_wait) in mgag200_bo_reserve() argument
283 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in mgag200_bo_reserve()
286 DRM_ERROR("reserve failed %p\n", bo); in mgag200_bo_reserve()
292 static inline void mgag200_bo_unreserve(struct mgag200_bo *bo) in mgag200_bo_unreserve() argument
294 ttm_bo_unreserve(&bo->bo); in mgag200_bo_unreserve()
302 int mgag200_bo_pin(struct mgag200_bo *bo, u32 pl_flag, u64 *gpu_addr);
[all …]
mgag200_fb.c
27 struct mgag200_bo *bo; in mga_dirty_update() local
37 bo = gem_to_mga_bo(obj); in mga_dirty_update()
45 ret = mgag200_bo_reserve(bo, true); in mga_dirty_update()
79 if (!bo->kmap.virtual) { in mga_dirty_update()
80 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_dirty_update()
83 mgag200_bo_unreserve(bo); in mga_dirty_update()
91 memcpy_toio(bo->kmap.virtual + src_offset, mfbdev->sysram + src_offset, (x2 - x + 1) * bpp); in mga_dirty_update()
95 ttm_bo_kunmap(&bo->kmap); in mga_dirty_update()
97 mgag200_bo_unreserve(bo); in mga_dirty_update()
170 struct mgag200_bo *bo; in mgag200fb_create() local
[all …]
mgag200_main.c
316 static void mgag200_bo_unref(struct mgag200_bo **bo) in mgag200_bo_unref() argument
320 if ((*bo) == NULL) in mgag200_bo_unref()
323 tbo = &((*bo)->bo); in mgag200_bo_unref()
325 *bo = NULL; in mgag200_bo_unref()
336 static inline u64 mgag200_bo_mmap_offset(struct mgag200_bo *bo) in mgag200_bo_mmap_offset() argument
338 return drm_vma_node_offset_addr(&bo->bo.vma_node); in mgag200_bo_mmap_offset()
349 struct mgag200_bo *bo; in mgag200_dumb_mmap_offset() local
358 bo = gem_to_mga_bo(obj); in mgag200_dumb_mmap_offset()
359 *offset = mgag200_bo_mmap_offset(bo); in mgag200_dumb_mmap_offset()
mgag200_mode.c
730 struct mgag200_bo *bo; in mga_crtc_do_set_base() local
738 bo = gem_to_mga_bo(obj); in mga_crtc_do_set_base()
739 ret = mgag200_bo_reserve(bo, false); in mga_crtc_do_set_base()
742 mgag200_bo_push_sysram(bo); in mga_crtc_do_set_base()
743 mgag200_bo_unreserve(bo); in mga_crtc_do_set_base()
748 bo = gem_to_mga_bo(obj); in mga_crtc_do_set_base()
750 ret = mgag200_bo_reserve(bo, false); in mga_crtc_do_set_base()
754 ret = mgag200_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in mga_crtc_do_set_base()
756 mgag200_bo_unreserve(bo); in mga_crtc_do_set_base()
762 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in mga_crtc_do_set_base()
[all …]
/linux-4.1.27/drivers/gpu/drm/ast/
ast_ttm.c
97 struct ast_bo *bo; in ast_bo_ttm_destroy() local
99 bo = container_of(tbo, struct ast_bo, bo); in ast_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in ast_bo_ttm_destroy()
102 kfree(bo); in ast_bo_ttm_destroy()
105 static bool ast_ttm_bo_is_ast_bo(struct ttm_buffer_object *bo) in ast_ttm_bo_is_ast_bo() argument
107 if (bo->destroy == &ast_bo_ttm_destroy) in ast_ttm_bo_is_ast_bo()
138 ast_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in ast_bo_evict_flags() argument
140 struct ast_bo *astbo = ast_bo(bo); in ast_bo_evict_flags()
142 if (!ast_ttm_bo_is_ast_bo(bo)) in ast_bo_evict_flags()
149 static int ast_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in ast_bo_verify_access() argument
[all …]
ast_drv.h
320 struct ttm_buffer_object bo; member
330 ast_bo(struct ttm_buffer_object *bo) in ast_bo() argument
332 return container_of(bo, struct ast_bo, bo); in ast_bo()
363 int ast_bo_pin(struct ast_bo *bo, u32 pl_flag, u64 *gpu_addr);
364 int ast_bo_unpin(struct ast_bo *bo);
366 static inline int ast_bo_reserve(struct ast_bo *bo, bool no_wait) in ast_bo_reserve() argument
370 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in ast_bo_reserve()
373 DRM_ERROR("reserve failed %p\n", bo); in ast_bo_reserve()
379 static inline void ast_bo_unreserve(struct ast_bo *bo) in ast_bo_unreserve() argument
381 ttm_bo_unreserve(&bo->bo); in ast_bo_unreserve()
[all …]
ast_fb.c
51 struct ast_bo *bo; in ast_dirty_update() local
61 bo = gem_to_ast_bo(obj); in ast_dirty_update()
69 ret = ast_bo_reserve(bo, true); in ast_dirty_update()
103 if (!bo->kmap.virtual) { in ast_dirty_update()
104 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_dirty_update()
107 ast_bo_unreserve(bo); in ast_dirty_update()
115 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, (x2 - x + 1) * bpp); in ast_dirty_update()
119 ttm_bo_kunmap(&bo->kmap); in ast_dirty_update()
121 ast_bo_unreserve(bo); in ast_dirty_update()
199 struct ast_bo *bo = NULL; in astfb_create() local
[all …]
ast_mode.c
515 struct ast_bo *bo; in ast_crtc_do_set_base() local
523 bo = gem_to_ast_bo(obj); in ast_crtc_do_set_base()
524 ret = ast_bo_reserve(bo, false); in ast_crtc_do_set_base()
527 ast_bo_push_sysram(bo); in ast_crtc_do_set_base()
528 ast_bo_unreserve(bo); in ast_crtc_do_set_base()
533 bo = gem_to_ast_bo(obj); in ast_crtc_do_set_base()
535 ret = ast_bo_reserve(bo, false); in ast_crtc_do_set_base()
539 ret = ast_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in ast_crtc_do_set_base()
541 ast_bo_unreserve(bo); in ast_crtc_do_set_base()
547 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in ast_crtc_do_set_base()
[all …]
ast_main.c
543 static void ast_bo_unref(struct ast_bo **bo) in ast_bo_unref() argument
547 if ((*bo) == NULL) in ast_bo_unref()
550 tbo = &((*bo)->bo); in ast_bo_unref()
552 *bo = NULL; in ast_bo_unref()
563 static inline u64 ast_bo_mmap_offset(struct ast_bo *bo) in ast_bo_mmap_offset() argument
565 return drm_vma_node_offset_addr(&bo->bo.vma_node); in ast_bo_mmap_offset()
575 struct ast_bo *bo; in ast_dumb_mmap_offset() local
584 bo = gem_to_ast_bo(obj); in ast_dumb_mmap_offset()
585 *offset = ast_bo_mmap_offset(bo); in ast_dumb_mmap_offset()
/linux-4.1.27/drivers/gpu/drm/cirrus/
cirrus_ttm.c
97 struct cirrus_bo *bo; in cirrus_bo_ttm_destroy() local
99 bo = container_of(tbo, struct cirrus_bo, bo); in cirrus_bo_ttm_destroy()
101 drm_gem_object_release(&bo->gem); in cirrus_bo_ttm_destroy()
102 kfree(bo); in cirrus_bo_ttm_destroy()
105 static bool cirrus_ttm_bo_is_cirrus_bo(struct ttm_buffer_object *bo) in cirrus_ttm_bo_is_cirrus_bo() argument
107 if (bo->destroy == &cirrus_bo_ttm_destroy) in cirrus_ttm_bo_is_cirrus_bo()
138 cirrus_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in cirrus_bo_evict_flags() argument
140 struct cirrus_bo *cirrusbo = cirrus_bo(bo); in cirrus_bo_evict_flags()
142 if (!cirrus_ttm_bo_is_cirrus_bo(bo)) in cirrus_bo_evict_flags()
149 static int cirrus_bo_verify_access(struct ttm_buffer_object *bo, struct file *filp) in cirrus_bo_verify_access() argument
[all …]
cirrus_drv.h
164 struct ttm_buffer_object bo; member
174 cirrus_bo(struct ttm_buffer_object *bo) in cirrus_bo() argument
176 return container_of(bo, struct cirrus_bo, bo); in cirrus_bo()
240 void cirrus_ttm_placement(struct cirrus_bo *bo, int domain);
245 static inline int cirrus_bo_reserve(struct cirrus_bo *bo, bool no_wait) in cirrus_bo_reserve() argument
249 ret = ttm_bo_reserve(&bo->bo, true, no_wait, false, NULL); in cirrus_bo_reserve()
252 DRM_ERROR("reserve failed %p\n", bo); in cirrus_bo_reserve()
258 static inline void cirrus_bo_unreserve(struct cirrus_bo *bo) in cirrus_bo_unreserve() argument
260 ttm_bo_unreserve(&bo->bo); in cirrus_bo_unreserve()
263 int cirrus_bo_push_sysram(struct cirrus_bo *bo);
[all …]
cirrus_fbdev.c
25 struct cirrus_bo *bo; in cirrus_dirty_update() local
35 bo = gem_to_cirrus_bo(obj); in cirrus_dirty_update()
43 ret = cirrus_bo_reserve(bo, true); in cirrus_dirty_update()
76 if (!bo->kmap.virtual) { in cirrus_dirty_update()
77 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_dirty_update()
80 cirrus_bo_unreserve(bo); in cirrus_dirty_update()
88 memcpy_toio(bo->kmap.virtual + src_offset, afbdev->sysram + src_offset, width * bpp); in cirrus_dirty_update()
92 ttm_bo_kunmap(&bo->kmap); in cirrus_dirty_update()
94 cirrus_bo_unreserve(bo); in cirrus_dirty_update()
176 struct cirrus_bo *bo = NULL; in cirrusfb_create() local
[all …]
cirrus_main.c
264 static void cirrus_bo_unref(struct cirrus_bo **bo) in cirrus_bo_unref() argument
268 if ((*bo) == NULL) in cirrus_bo_unref()
271 tbo = &((*bo)->bo); in cirrus_bo_unref()
273 *bo = NULL; in cirrus_bo_unref()
284 static inline u64 cirrus_bo_mmap_offset(struct cirrus_bo *bo) in cirrus_bo_mmap_offset() argument
286 return drm_vma_node_offset_addr(&bo->bo.vma_node); in cirrus_bo_mmap_offset()
297 struct cirrus_bo *bo; in cirrus_dumb_mmap_offset() local
306 bo = gem_to_cirrus_bo(obj); in cirrus_dumb_mmap_offset()
307 *offset = cirrus_bo_mmap_offset(bo); in cirrus_dumb_mmap_offset()
cirrus_mode.c
137 struct cirrus_bo *bo; in cirrus_crtc_do_set_base() local
145 bo = gem_to_cirrus_bo(obj); in cirrus_crtc_do_set_base()
146 ret = cirrus_bo_reserve(bo, false); in cirrus_crtc_do_set_base()
149 cirrus_bo_push_sysram(bo); in cirrus_crtc_do_set_base()
150 cirrus_bo_unreserve(bo); in cirrus_crtc_do_set_base()
155 bo = gem_to_cirrus_bo(obj); in cirrus_crtc_do_set_base()
157 ret = cirrus_bo_reserve(bo, false); in cirrus_crtc_do_set_base()
161 ret = cirrus_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in cirrus_crtc_do_set_base()
163 cirrus_bo_unreserve(bo); in cirrus_crtc_do_set_base()
169 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, &bo->kmap); in cirrus_crtc_do_set_base()
[all …]
/linux-4.1.27/drivers/gpu/drm/vmwgfx/
vmwgfx_dmabuf.c
55 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_placement() local
64 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL); in vmw_dmabuf_to_placement()
68 ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_placement()
70 ttm_bo_unreserve(bo); in vmw_dmabuf_to_placement()
97 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_vram_or_gmr() local
108 ret = ttm_bo_reserve(bo, interruptible, false, false, NULL); in vmw_dmabuf_to_vram_or_gmr()
124 ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_vram_or_gmr()
139 ret = ttm_bo_validate(bo, placement, interruptible, false); in vmw_dmabuf_to_vram_or_gmr()
142 ttm_bo_unreserve(bo); in vmw_dmabuf_to_vram_or_gmr()
199 struct ttm_buffer_object *bo = &buf->base; in vmw_dmabuf_to_start_of_vram() local
[all …]
vmwgfx_resource.c
73 vmw_dma_buffer(struct ttm_buffer_object *bo) in vmw_dma_buffer() argument
75 return container_of(bo, struct vmw_dma_buffer, base); in vmw_dma_buffer()
79 vmw_user_dma_buffer(struct ttm_buffer_object *bo) in vmw_user_dma_buffer() argument
81 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_user_dma_buffer()
128 struct ttm_buffer_object *bo = &res->backup->base; in vmw_resource_release() local
130 ttm_bo_reserve(bo, false, false, false, NULL); in vmw_resource_release()
135 val_buf.bo = bo; in vmw_resource_release()
141 ttm_bo_unreserve(bo); in vmw_resource_release()
398 void vmw_dmabuf_bo_free(struct ttm_buffer_object *bo) in vmw_dmabuf_bo_free() argument
400 struct vmw_dma_buffer *vmw_bo = vmw_dma_buffer(bo); in vmw_dmabuf_bo_free()
[all …]
vmwgfx_mob.c
184 struct ttm_buffer_object *bo; in vmw_takedown_otable_base() local
189 bo = otable->page_table->pt_bo; in vmw_takedown_otable_base()
206 if (bo) { in vmw_takedown_otable_base()
209 ret = ttm_bo_reserve(bo, false, true, false, NULL); in vmw_takedown_otable_base()
212 vmw_fence_single_bo(bo, NULL); in vmw_takedown_otable_base()
213 ttm_bo_unreserve(bo); in vmw_takedown_otable_base()
321 struct ttm_buffer_object *bo = dev_priv->otable_bo; in vmw_otables_takedown() local
328 ret = ttm_bo_reserve(bo, false, true, false, NULL); in vmw_otables_takedown()
331 vmw_fence_single_bo(bo, NULL); in vmw_otables_takedown()
332 ttm_bo_unreserve(bo); in vmw_otables_takedown()
[all …]
vmwgfx_buffer.c
501 int vmw_bo_map_dma(struct ttm_buffer_object *bo) in vmw_bo_map_dma() argument
504 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_map_dma()
518 void vmw_bo_unmap_dma(struct ttm_buffer_object *bo) in vmw_bo_unmap_dma() argument
521 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_unmap_dma()
538 const struct vmw_sg_table *vmw_bo_sg_table(struct ttm_buffer_object *bo) in vmw_bo_sg_table() argument
541 container_of(bo->ttm, struct vmw_ttm_tt, dma_ttm.ttm); in vmw_bo_sg_table()
753 static void vmw_evict_flags(struct ttm_buffer_object *bo, in vmw_evict_flags() argument
759 static int vmw_verify_access(struct ttm_buffer_object *bo, struct file *filp) in vmw_verify_access() argument
764 return vmw_user_dmabuf_verify_access(bo, tfile); in vmw_verify_access()
799 static int vmw_ttm_fault_reserve_notify(struct ttm_buffer_object *bo) in vmw_ttm_fault_reserve_notify() argument
[all …]
vmwgfx_drv.h
626 extern void vmw_dmabuf_bo_free(struct ttm_buffer_object *bo);
631 void (*bo_free) (struct ttm_buffer_object *bo));
632 extern int vmw_user_dmabuf_verify_access(struct ttm_buffer_object *bo,
650 extern uint32_t vmw_dmabuf_validate_node(struct ttm_buffer_object *bo,
652 extern void vmw_dmabuf_validate_clear(struct ttm_buffer_object *bo);
667 extern void vmw_resource_move_notify(struct ttm_buffer_object *bo,
669 extern void vmw_fence_single_bo(struct ttm_buffer_object *bo,
677 struct vmw_dma_buffer *bo,
687 struct vmw_dma_buffer *bo,
690 struct vmw_dma_buffer *bo,
[all …]
vmwgfx_gmrid_manager.c
48 struct ttm_buffer_object *bo, in vmw_gmrid_man_get_node() argument
62 gman->used_gmr_pages += bo->num_pages; in vmw_gmrid_man_get_node()
86 mem->num_pages = bo->num_pages; in vmw_gmrid_man_get_node()
96 gman->used_gmr_pages -= bo->num_pages; in vmw_gmrid_man_get_node()
vmwgfx_execbuf.c
311 struct ttm_buffer_object *bo, in vmw_bo_to_validate_list() argument
321 if (likely(drm_ht_find_item(&sw_context->res_ht, (unsigned long) bo, in vmw_bo_to_validate_list()
339 vval_buf->hash.key = (unsigned long) bo; in vmw_bo_to_validate_list()
348 val_buf->bo = ttm_bo_reference(bo); in vmw_bo_to_validate_list()
383 struct ttm_buffer_object *bo = &res->backup->base; in vmw_resources_reserve() local
386 (sw_context, bo, in vmw_resources_reserve()
885 struct ttm_buffer_object *bo; in vmw_translate_mob_ptr() local
897 bo = &vmw_bo->base; in vmw_translate_mob_ptr()
910 ret = vmw_bo_to_validate_list(sw_context, bo, true, &reloc->index); in vmw_translate_mob_ptr()
948 struct ttm_buffer_object *bo; in vmw_translate_guest_ptr() local
[all …]
vmwgfx_fifo.c
548 struct ttm_buffer_object *bo = dev_priv->dummy_query_bo; in vmw_fifo_emit_dummy_legacy_query() local
566 if (bo->mem.mem_type == TTM_PL_VRAM) { in vmw_fifo_emit_dummy_legacy_query()
568 cmd->body.guestResult.offset = bo->offset; in vmw_fifo_emit_dummy_legacy_query()
570 cmd->body.guestResult.gmrId = bo->mem.start; in vmw_fifo_emit_dummy_legacy_query()
597 struct ttm_buffer_object *bo = dev_priv->dummy_query_bo; in vmw_fifo_emit_dummy_gb_query() local
614 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_fifo_emit_dummy_gb_query()
615 cmd->body.mobid = bo->mem.start; in vmw_fifo_emit_dummy_gb_query()
vmwgfx_context.c
302 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_context_bind() local
304 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_context_bind()
316 cmd->body.mobid = bo->mem.start; in vmw_gb_context_bind()
329 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_context_unbind() local
346 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_context_unbind()
384 vmw_fence_single_bo(bo, fence); in vmw_gb_context_unbind()
vmwgfx_surface.c
446 BUG_ON(val_buf->bo == NULL); in vmw_legacy_srf_dma()
455 vmw_bo_get_guest_ptr(val_buf->bo, &ptr); in vmw_legacy_srf_dma()
467 vmw_fence_single_bo(val_buf->bo, fence); in vmw_legacy_srf_dma()
1093 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_surface_bind() local
1095 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_surface_bind()
1109 cmd1->body.mobid = bo->mem.start; in vmw_gb_surface_bind()
1127 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_surface_unbind() local
1146 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_surface_unbind()
1184 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_surface_unbind()
vmwgfx_shader.c
186 struct ttm_buffer_object *bo = val_buf->bo; in vmw_gb_shader_bind() local
188 BUG_ON(bo->mem.mem_type != VMW_PL_MOB); in vmw_gb_shader_bind()
200 cmd->body.mobid = bo->mem.start; in vmw_gb_shader_bind()
242 vmw_fence_single_bo(val_buf->bo, fence); in vmw_gb_shader_unbind()
vmwgfx_drv.c
300 struct ttm_buffer_object *bo; in vmw_dummy_query_bo_create() local
315 &bo); in vmw_dummy_query_bo_create()
320 ret = ttm_bo_reserve(bo, false, true, false, NULL); in vmw_dummy_query_bo_create()
323 ret = ttm_bo_kmap(bo, 0, 1, &map); in vmw_dummy_query_bo_create()
331 vmw_bo_pin(bo, false); in vmw_dummy_query_bo_create()
332 ttm_bo_unreserve(bo); in vmw_dummy_query_bo_create()
336 ttm_bo_unref(&bo); in vmw_dummy_query_bo_create()
338 dev_priv->dummy_query_bo = bo; in vmw_dummy_query_bo_create()
vmwgfx_scrn.c
205 struct ttm_buffer_object *bo; in vmw_sou_backing_free() local
210 bo = &sou->buffer->base; in vmw_sou_backing_free()
211 ttm_bo_unref(&bo); in vmw_sou_backing_free()
vmwgfx_kms.c
291 struct ttm_buffer_object *bo, in vmw_kms_cursor_snoop() argument
346 ret = ttm_bo_reserve(bo, true, false, false, NULL); in vmw_kms_cursor_snoop()
352 ret = ttm_bo_kmap(bo, kmap_offset, kmap_num, &map); in vmw_kms_cursor_snoop()
381 ttm_bo_unreserve(bo); in vmw_kms_cursor_snoop()
1139 struct vmw_dma_buffer *bo = NULL; in vmw_kms_fb_create() local
1186 &surface, &bo); in vmw_kms_fb_create()
1191 if (bo) in vmw_kms_fb_create()
1192 ret = vmw_kms_new_framebuffer_dmabuf(dev_priv, bo, &vfb, in vmw_kms_fb_create()
1202 if (bo) in vmw_kms_fb_create()
1203 vmw_dmabuf_unreference(&bo); in vmw_kms_fb_create()
vmwgfx_fb.c
565 struct ttm_buffer_object *bo; in vmw_fb_close() local
572 bo = &par->vmw_bo->base; in vmw_fb_close()
580 ttm_bo_unref(&bo); in vmw_fb_close()
/linux-4.1.27/drivers/gpu/drm/bochs/
bochs_mm.c
10 static void bochs_ttm_placement(struct bochs_bo *bo, int domain);
76 struct bochs_bo *bo; in bochs_bo_ttm_destroy() local
78 bo = container_of(tbo, struct bochs_bo, bo); in bochs_bo_ttm_destroy()
79 drm_gem_object_release(&bo->gem); in bochs_bo_ttm_destroy()
80 kfree(bo); in bochs_bo_ttm_destroy()
83 static bool bochs_ttm_bo_is_bochs_bo(struct ttm_buffer_object *bo) in bochs_ttm_bo_is_bochs_bo() argument
85 if (bo->destroy == &bochs_bo_ttm_destroy) in bochs_ttm_bo_is_bochs_bo()
115 bochs_bo_evict_flags(struct ttm_buffer_object *bo, struct ttm_placement *pl) in bochs_bo_evict_flags() argument
117 struct bochs_bo *bochsbo = bochs_bo(bo); in bochs_bo_evict_flags()
119 if (!bochs_ttm_bo_is_bochs_bo(bo)) in bochs_bo_evict_flags()
[all …]
bochs_fbdev.c
18 struct bochs_bo *bo = gem_to_bochs_bo(bochs->fb.gfb.obj); in bochsfb_mmap() local
20 return ttm_fbdev_mmap(vma, &bo->bo); in bochsfb_mmap()
65 struct bochs_bo *bo = NULL; in bochsfb_create() local
85 bo = gem_to_bochs_bo(gobj); in bochsfb_create()
87 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochsfb_create()
91 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, NULL); in bochsfb_create()
94 ttm_bo_unreserve(&bo->bo); in bochsfb_create()
98 ret = ttm_bo_kmap(&bo->bo, 0, bo->bo.num_pages, in bochsfb_create()
99 &bo->kmap); in bochsfb_create()
102 ttm_bo_unreserve(&bo->bo); in bochsfb_create()
[all …]
bochs_kms.c
46 struct bochs_bo *bo; in bochs_crtc_mode_set_base() local
52 bo = gem_to_bochs_bo(bochs_fb->obj); in bochs_crtc_mode_set_base()
53 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochs_crtc_mode_set_base()
57 bochs_bo_unpin(bo); in bochs_crtc_mode_set_base()
58 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
66 bo = gem_to_bochs_bo(bochs_fb->obj); in bochs_crtc_mode_set_base()
67 ret = ttm_bo_reserve(&bo->bo, true, false, false, NULL); in bochs_crtc_mode_set_base()
71 ret = bochs_bo_pin(bo, TTM_PL_FLAG_VRAM, &gpu_addr); in bochs_crtc_mode_set_base()
73 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
77 ttm_bo_unreserve(&bo->bo); in bochs_crtc_mode_set_base()
bochs.h
100 struct ttm_buffer_object bo; member
108 static inline struct bochs_bo *bochs_bo(struct ttm_buffer_object *bo) in bochs_bo() argument
110 return container_of(bo, struct bochs_bo, bo); in bochs_bo()
120 static inline u64 bochs_bo_mmap_offset(struct bochs_bo *bo) in bochs_bo_mmap_offset() argument
122 return drm_vma_node_offset_addr(&bo->bo.vma_node); in bochs_bo_mmap_offset()
154 int bochs_bo_pin(struct bochs_bo *bo, u32 pl_flag, u64 *gpu_addr);
155 int bochs_bo_unpin(struct bochs_bo *bo);
/linux-4.1.27/include/linux/
host1x.h
61 struct host1x_bo *(*get)(struct host1x_bo *bo);
62 void (*put)(struct host1x_bo *bo);
63 dma_addr_t (*pin)(struct host1x_bo *bo, struct sg_table **sgt);
64 void (*unpin)(struct host1x_bo *bo, struct sg_table *sgt);
65 void *(*mmap)(struct host1x_bo *bo);
66 void (*munmap)(struct host1x_bo *bo, void *addr);
67 void *(*kmap)(struct host1x_bo *bo, unsigned int pagenum);
68 void (*kunmap)(struct host1x_bo *bo, unsigned int pagenum, void *addr);
75 static inline void host1x_bo_init(struct host1x_bo *bo, in host1x_bo_init() argument
78 bo->ops = ops; in host1x_bo_init()
[all …]
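
host1x_bo is a small ops-table abstraction: a client driver embeds it in its own buffer object and wires the table through host1x_bo_init(), which is what the tegra/gem.c hits above do. A compressed, hypothetical sketch (demo_obj and the stub ops are illustrative, not from the source):

#include <linux/host1x.h>

struct demo_obj {
	struct host1x_bo base;
	/* backing pages, sg table, kernel vaddr, ... */
};

static struct host1x_bo *demo_get(struct host1x_bo *bo)
{
	/* take a reference on the wrapping object, then hand bo back */
	return bo;
}

static void demo_put(struct host1x_bo *bo)
{
	/* drop the reference taken in demo_get() */
}

static const struct host1x_bo_ops demo_ops = {
	.get = demo_get,
	.put = demo_put,
	/* .pin, .unpin, .mmap, .munmap, .kmap, .kunmap as required */
};

static void demo_obj_init(struct demo_obj *obj)
{
	host1x_bo_init(&obj->base, &demo_ops);
}
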
/linux-4.1.27/include/drm/ttm/
ttm_bo_driver.h
210 struct ttm_buffer_object *bo,
381 void(*evict_flags) (struct ttm_buffer_object *bo,
396 int (*move) (struct ttm_buffer_object *bo,
413 int (*verify_access) (struct ttm_buffer_object *bo,
418 void (*move_notify)(struct ttm_buffer_object *bo,
422 int (*fault_reserve_notify)(struct ttm_buffer_object *bo);
427 void (*swap_notify) (struct ttm_buffer_object *bo);
694 extern int ttm_bo_mem_space(struct ttm_buffer_object *bo,
700 extern void ttm_bo_mem_put(struct ttm_buffer_object *bo,
702 extern void ttm_bo_mem_put_locked(struct ttm_buffer_object *bo,
[all …]
ttm_bo_api.h
285 struct ttm_buffer_object *bo; member
297 ttm_bo_reference(struct ttm_buffer_object *bo) in ttm_bo_reference() argument
299 kref_get(&bo->kref); in ttm_bo_reference()
300 return bo; in ttm_bo_reference()
317 extern int ttm_bo_wait(struct ttm_buffer_object *bo, bool lazy,
335 extern int ttm_bo_validate(struct ttm_buffer_object *bo,
347 extern void ttm_bo_unref(struct ttm_buffer_object **bo);
359 extern void ttm_bo_list_ref_sub(struct ttm_buffer_object *bo, int count,
372 extern void ttm_bo_add_to_lru(struct ttm_buffer_object *bo);
384 extern int ttm_bo_del_from_lru(struct ttm_buffer_object *bo);
[all …]
ttm_execbuf_util.h
47 struct ttm_buffer_object *bo; member
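
ttm_validate_buffer above is the list node that radeon_cs.c, radeon_vm.c and the vmwgfx code fill in before reserving whole buffer sets. A hedged sketch of the round trip, assuming the v4.1-era signatures of ttm_eu_reserve_buffers() and ttm_eu_backoff_reservation(); the demo_* names are illustrative:

#include <drm/ttm/ttm_execbuf_util.h>

static int demo_reserve_pair(struct ttm_buffer_object *a,
			     struct ttm_buffer_object *b)
{
	struct ttm_validate_buffer va = { .bo = a }, vb = { .bo = b };
	struct ww_acquire_ctx ticket;
	LIST_HEAD(list);
	int ret;

	list_add(&va.head, &list);
	list_add(&vb.head, &list);

	/* Reserves every bo on the list under one ww-mutex ticket,
	 * handling backoff/retry on contention internally. */
	ret = ttm_eu_reserve_buffers(&ticket, &list, true, NULL);
	if (ret)
		return ret;

	/* ... validate placements, emit commands, fence ... */

	ttm_eu_backoff_reservation(&ticket, &list);
	return 0;
}
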
/linux-4.1.27/drivers/gpu/drm/nouveau/
nouveau_bo.c
134 nouveau_bo_del_ttm(struct ttm_buffer_object *bo) in nouveau_bo_del_ttm() argument
136 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_bo_del_ttm()
138 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_bo_del_ttm()
141 DRM_ERROR("bo %p still attached to GEM object\n", bo); in nouveau_bo_del_ttm()
151 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_bo_fixup_align()
215 nvbo->bo.bdev = &drm->ttm.bdev; in nouveau_bo_new()
227 nvbo->bo.mem.num_pages = size >> PAGE_SHIFT; in nouveau_bo_new()
233 ret = ttm_bo_init(&drm->ttm.bdev, &nvbo->bo, size, in nouveau_bo_new()
262 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in set_placement_range()
268 nvbo->bo.mem.num_pages < vram_pages / 4) { in set_placement_range()
[all …]
nv50_fence.c
40 struct ttm_mem_reg *mem = &priv->bo->bo.mem; in nv50_fence_context_new()
65 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv50_fence_context_new() local
66 u32 start = bo->bo.mem.start * PAGE_SIZE; in nv50_fence_context_new()
67 u32 limit = start + bo->bo.mem.size - 1; in nv50_fence_context_new()
103 0, 0x0000, NULL, NULL, &priv->bo); in nv50_fence_create()
105 ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false); in nv50_fence_create()
107 ret = nouveau_bo_map(priv->bo); in nv50_fence_create()
109 nouveau_bo_unpin(priv->bo); in nv50_fence_create()
112 nouveau_bo_ref(NULL, &priv->bo); in nv50_fence_create()
120 nouveau_bo_wr32(priv->bo, 0x000, 0x00000000); in nv50_fence_create()
nouveau_gem.c
39 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_del()
40 struct ttm_buffer_object *bo = &nvbo->bo; in nouveau_gem_object_del() local
49 drm_prime_gem_destroy(gem, nvbo->bo.sg); in nouveau_gem_object_del()
55 ttm_bo_unref(&bo); in nouveau_gem_object_del()
66 struct nouveau_drm *drm = nouveau_bdev(nvbo->bo.bdev); in nouveau_gem_object_open()
74 ret = ttm_bo_reserve(&nvbo->bo, false, false, false, NULL); in nouveau_gem_object_open()
101 ttm_bo_unreserve(&nvbo->bo); in nouveau_gem_object_open()
117 const bool mapped = nvbo->bo.mem.mem_type != TTM_PL_SYSTEM; in nouveau_gem_object_unmap()
118 struct reservation_object *resv = nvbo->bo.resv; in nouveau_gem_object_unmap()
127 ttm_bo_wait(&nvbo->bo, true, false, false); in nouveau_gem_object_unmap()
[all …]
nv84_fence.c
107 return nouveau_bo_rd32(priv->bo, chan->chid * 16/4); in nv84_fence_read()
119 struct nouveau_bo *bo = nv50_display_crtc_sema(dev, i); in nv84_fence_context_del() local
120 nouveau_bo_vma_del(bo, &fctx->dispc_vma[i]); in nv84_fence_context_del()
123 nouveau_bo_wr32(priv->bo, chan->chid * 16 / 4, fctx->base.sequence); in nv84_fence_context_del()
124 nouveau_bo_vma_del(priv->bo, &fctx->vma_gart); in nv84_fence_context_del()
125 nouveau_bo_vma_del(priv->bo, &fctx->vma); in nv84_fence_context_del()
151 ret = nouveau_bo_vma_add(priv->bo, cli->vm, &fctx->vma); in nv84_fence_context_new()
159 struct nouveau_bo *bo = nv50_display_crtc_sema(chan->drm->dev, i); in nv84_fence_context_new() local
160 ret = nouveau_bo_vma_add(bo, cli->vm, &fctx->dispc_vma[i]); in nv84_fence_context_new()
177 priv->suspend[i] = nouveau_bo_rd32(priv->bo, i*4); in nv84_fence_suspend()
[all …]
nouveau_bo.h
11 struct ttm_buffer_object bo; member
45 nouveau_bo(struct ttm_buffer_object *bo) in nouveau_bo() argument
47 return container_of(bo, struct nouveau_bo, bo); in nouveau_bo()
59 *pnvbo = ref ? nouveau_bo(ttm_bo_reference(&ref->bo)) : NULL; in nouveau_bo_ref()
61 struct ttm_buffer_object *bo = &prev->bo; in nouveau_bo_ref() local
63 ttm_bo_unref(&bo); in nouveau_bo_ref()
nv17_fence.c
78 struct ttm_mem_reg *mem = &priv->bo->bo.mem; in nv17_fence_context_new()
110 nouveau_bo_wr32(priv->bo, 0, priv->sequence); in nv17_fence_resume()
132 0, 0x0000, NULL, NULL, &priv->bo); in nv17_fence_create()
134 ret = nouveau_bo_pin(priv->bo, TTM_PL_FLAG_VRAM, false); in nv17_fence_create()
136 ret = nouveau_bo_map(priv->bo); in nv17_fence_create()
138 nouveau_bo_unpin(priv->bo); in nv17_fence_create()
141 nouveau_bo_ref(NULL, &priv->bo); in nv17_fence_create()
149 nouveau_bo_wr32(priv->bo, 0x000, 0x00000000); in nv17_fence_create()
nouveau_prime.c
34 int npages = nvbo->bo.num_pages; in nouveau_gem_prime_get_sg_table()
36 return drm_prime_pages_to_sg(nvbo->bo.ttm->pages, npages); in nouveau_gem_prime_get_sg_table()
44 ret = ttm_bo_kmap(&nvbo->bo, 0, nvbo->bo.num_pages, in nouveau_gem_prime_vmap()
81 ret = drm_gem_object_init(dev, &nvbo->gem, nvbo->bo.mem.size); in nouveau_gem_prime_import_sg_table()
114 return nvbo->bo.resv; in nouveau_gem_prime_res_obj()
nv10_fence.c
89 nouveau_bo_unmap(priv->bo); in nv10_fence_destroy()
90 if (priv->bo) in nv10_fence_destroy()
91 nouveau_bo_unpin(priv->bo); in nv10_fence_destroy()
92 nouveau_bo_ref(NULL, &priv->bo); in nv10_fence_destroy()
nouveau_display.c
643 nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset); in nouveau_display_resume()
727 ret = ttm_bo_reserve(&new_bo->bo, true, false, false, NULL); in nouveau_crtc_page_flip()
734 ttm_bo_unreserve(&new_bo->bo); in nouveau_crtc_page_flip()
739 ttm_bo_unreserve(&new_bo->bo); in nouveau_crtc_page_flip()
741 ret = ttm_bo_reserve(&old_bo->bo, true, false, false, NULL); in nouveau_crtc_page_flip()
750 new_bo->bo.offset }; in nouveau_crtc_page_flip()
791 ttm_bo_unreserve(&old_bo->bo); in nouveau_crtc_page_flip()
799 ttm_bo_unreserve(&old_bo->bo); in nouveau_crtc_page_flip()
871 struct nouveau_bo *bo; in nouveau_display_dumb_create() local
885 ret = nouveau_gem_new(dev, args->size, 0, domain, 0, 0, &bo); in nouveau_display_dumb_create()
[all …]
nouveau_ttm.c
74 struct ttm_buffer_object *bo, in nouveau_vram_manager_new() argument
80 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_vram_manager_new()
163 struct ttm_buffer_object *bo, in nouveau_gart_manager_new() argument
167 struct nouveau_drm *drm = nouveau_bdev(bo->bdev); in nouveau_gart_manager_new()
168 struct nouveau_bo *nvbo = nouveau_bo(bo); in nouveau_gart_manager_new()
243 struct ttm_buffer_object *bo, in nv04_gart_manager_new() argument
nv10_fence.h
15 struct nouveau_bo *bo; member
nouveau_abi16.c
294 if (chan->chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) in nouveau_abi16_ioctl_channel_alloc()
467 args.ctxdma.start += drm->agp.base + chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
468 args.ctxdma.limit += drm->agp.base + chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
473 args.ctxdma.start += chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
474 args.ctxdma.limit += chan->ntfy->bo.offset; in nouveau_abi16_ioctl_notifierobj_alloc()
nouveau_dma.c
82 nv50_dma_push(struct nouveau_channel *chan, struct nouveau_bo *bo, in nv50_dma_push() argument
91 vma = nouveau_bo_vma_find(bo, cli->vm); in nv50_dma_push()
nouveau_fence.h
99 struct nouveau_bo *bo; member
nv50_display.c
649 evo_data(push, nv_fb->nvbo->bo.offset >> 8); in nv50_display_flip_next()
656 evo_data(push, nv_fb->nvbo->bo.offset >> 8); in nv50_display_flip_next()
888 evo_data(push, nvfb->nvbo->bo.offset >> 8); in nv50_crtc_set_image()
901 evo_data(push, nvfb->nvbo->bo.offset >> 8); in nv50_crtc_set_image()
931 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
936 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
942 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8); in nv50_crtc_cursor_show()
1055 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
1062 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
1070 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8); in nv50_crtc_commit()
[all …]
nouveau_fbcon.c
426 info->fix.smem_start = nvbo->bo.mem.bus.base + in nouveau_fbcon_create()
427 nvbo->bo.mem.bus.offset; in nouveau_fbcon_create()
447 nvbo->bo.offset, nvbo); in nouveau_fbcon_create()
nouveau_chan.c
126 chan->push.vma.offset = chan->push.buffer->bo.offset; in nouveau_channel_prep()
141 if (chan->push.buffer->bo.mem.mem_type == TTM_PL_VRAM) { in nouveau_channel_prep()
nouveau_fence.c
395 struct reservation_object *resv = nvbo->bo.resv; in nouveau_fence_sync()
/linux-4.1.27/crypto/
aes_generic.c
1300 #define f_rn(bo, bi, n, k) do { \ argument
1301 bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^ \
1307 #define f_nround(bo, bi, k) do {\ argument
1308 f_rn(bo, bi, 0, k); \
1309 f_rn(bo, bi, 1, k); \
1310 f_rn(bo, bi, 2, k); \
1311 f_rn(bo, bi, 3, k); \
1315 #define f_rl(bo, bi, n, k) do { \ argument
1316 bo[n] = crypto_fl_tab[0][byte(bi[n], 0)] ^ \
1322 #define f_lround(bo, bi, k) do {\ argument
[all …]
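
The macro bodies are elided by the search view; f_rn in aes_generic.c is a standard AES T-table round step, reconstructed here from the kernel source (treat the exact rotation indices as a best-effort quote):

#define f_rn(bo, bi, n, k)	do {				\
	bo[n] = crypto_ft_tab[0][byte(bi[n], 0)] ^		\
		crypto_ft_tab[1][byte(bi[(n + 1) & 3], 1)] ^	\
		crypto_ft_tab[2][byte(bi[(n + 2) & 3], 2)] ^	\
		crypto_ft_tab[3][byte(bi[(n + 3) & 3], 3)] ^	\
		*(k + n);					\
} while (0)

Each of the four lookups consumes one byte of a rotated state word, so one f_nround invocation (four f_rn calls) advances the whole 128-bit state by a round.
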
/linux-4.1.27/drivers/gpu/drm/msm/
msm_ringbuffer.c
35 ring->bo = msm_gem_new(gpu->dev, size, MSM_BO_WC); in msm_ringbuffer_new()
36 if (IS_ERR(ring->bo)) { in msm_ringbuffer_new()
37 ret = PTR_ERR(ring->bo); in msm_ringbuffer_new()
38 ring->bo = NULL; in msm_ringbuffer_new()
42 ring->start = msm_gem_vaddr_locked(ring->bo); in msm_ringbuffer_new()
58 if (ring->bo) in msm_ringbuffer_destroy()
59 drm_gem_object_unreference_unlocked(ring->bo); in msm_ringbuffer_destroy()
msm_fbdev.c
37 struct drm_gem_object *bo; member
64 struct drm_gem_object *drm_obj = fbdev->bo; in msm_fbdev_mmap()
113 fbdev->bo = msm_gem_new(dev, size, MSM_BO_SCANOUT | in msm_fbdev_create()
116 if (IS_ERR(fbdev->bo)) { in msm_fbdev_create()
117 ret = PTR_ERR(fbdev->bo); in msm_fbdev_create()
118 fbdev->bo = NULL; in msm_fbdev_create()
123 fb = msm_framebuffer_init(dev, &mode_cmd, &fbdev->bo); in msm_fbdev_create()
129 drm_gem_object_unreference(fbdev->bo); in msm_fbdev_create()
141 ret = msm_gem_get_iova_locked(fbdev->bo, 0, &paddr); in msm_fbdev_create()
177 fbi->screen_base = msm_gem_vaddr_locked(fbdev->bo); in msm_fbdev_create()
[all …]
msm_fb.c
51 struct drm_gem_object *bo = msm_fb->planes[i]; in msm_framebuffer_destroy() local
52 if (bo) in msm_framebuffer_destroy()
53 drm_gem_object_unreference_unlocked(bo); in msm_framebuffer_destroy()
msm_ringbuffer.h
26 struct drm_gem_object *bo; member
msm_gpu.c
645 msm_gem_put_iova(gpu->rb->bo, gpu->id); in msm_gpu_cleanup()
/linux-4.1.27/drivers/gpu/host1x/
job.c
100 void host1x_job_add_gather(struct host1x_job *job, struct host1x_bo *bo, in host1x_job_add_gather() argument
106 cur_gather->bo = bo; in host1x_job_add_gather()
155 if (patch != wait->bo) in do_waitchks()
158 trace_host1x_syncpt_wait_check(wait->bo, wait->offset, in do_waitchks()
171 wait->bo = NULL; in do_waitchks()
188 reloc->target.bo = host1x_bo_get(reloc->target.bo); in pin_job()
189 if (!reloc->target.bo) in pin_job()
192 phys_addr = host1x_bo_pin(reloc->target.bo, &sgt); in pin_job()
197 job->unpins[job->num_unpins].bo = reloc->target.bo; in pin_job()
207 g->bo = host1x_bo_get(g->bo); in pin_job()
[all …]
job.h
25 struct host1x_bo *bo; member
38 struct host1x_bo *bo; member
45 struct host1x_bo *bo; member
/linux-4.1.27/net/can/
Dbcm.c163 struct bcm_sock *bo = bcm_sk(sk); in bcm_proc_show() local
168 seq_printf(m, " / bo %pK", bo); in bcm_proc_show()
169 seq_printf(m, " / dropped %lu", bo->dropped_usr_msgs); in bcm_proc_show()
170 seq_printf(m, " / bound %s", bcm_proc_getifname(ifname, bo->ifindex)); in bcm_proc_show()
173 list_for_each_entry(op, &bo->rx_ops, list) { in bcm_proc_show()
204 list_for_each_entry(op, &bo->tx_ops, list) { in bcm_proc_show()
340 struct bcm_sock *bo = bcm_sk(sk); in bcm_send_to_user() local
344 bo->dropped_usr_msgs++; in bcm_send_to_user()
831 struct bcm_sock *bo = bcm_sk(sk); in bcm_tx_setup() local
845 op = bcm_find_op(&bo->tx_ops, msg_head->can_id, ifindex); in bcm_tx_setup()
[all …]
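bcm_tx_setup() above resolves an existing operation with bcm_find_op(&bo->tx_ops, can_id, ifindex). A hedged sketch of that lookup, with a plain singly linked list and a simplified struct bcm_op standing in for the kernel's list_head machinery:

#include <stddef.h>
#include <stdint.h>

/* Simplified stand-in for the kernel's struct bcm_op. */
struct bcm_op {
	struct bcm_op *next;
	uint32_t can_id;
	int ifindex;
};

/* Walk the socket's op list; an op matches on both CAN ID and the
 * interface it is bound to. */
static struct bcm_op *bcm_find_op(struct bcm_op *ops, uint32_t can_id,
				  int ifindex)
{
	struct bcm_op *op;

	for (op = ops; op; op = op->next)
		if (op->can_id == can_id && op->ifindex == ifindex)
			return op;
	return NULL;
}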
/linux-4.1.27/drivers/crypto/vmx/
Dppc-xlate.pl106 my $bo = $f=~/[\+\-]/ ? 16+9 : 16; # optional "to be taken" hint
107 " bc $bo,0,".shift;
111 my $bo = $f=~/\-/ ? 12+2 : 12; # optional "not to be taken" hint
113 " .long ".sprintf "0x%x",19<<26|$bo<<21|16<<1 :
114 " bclr $bo,0";
118 my $bo = $f=~/\-/ ? 4+2 : 4; # optional "not to be taken" hint
120 " .long ".sprintf "0x%x",19<<26|$bo<<21|2<<16|16<<1 :
121 " bclr $bo,2";
125 my $bo = $f=~/-/ ? 12+2 : 12; # optional "not to be taken" hint
127 " .long ".sprintf "0x%X",19<<26|$bo<<21|2<<16|16<<1 :
[all …]
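Throughout ppc-xlate.pl, $bo is the PowerPC BO operand, placed in bits 21-25 of the instruction word; the .long fallbacks hand-encode bclr as primary opcode 19 with extended opcode 16, and the +2 in the hinted variants sets the static branch-prediction bit, as the comments note. A small sketch of the same encoding, assuming BO=20 ("branch always"), which yields the canonical blr word:

#include <stdint.h>
#include <stdio.h>

/* Encode bclr: primary opcode 19 in bits 26-31, BO in bits 21-25,
 * BI in bits 16-20, extended opcode 16 in bits 1-10, LK=0. */
static uint32_t encode_bclr(uint32_t bo, uint32_t bi)
{
	return 19u << 26 | bo << 21 | bi << 16 | 16u << 1;
}

int main(void)
{
	/* BO=20 ignores both CTR and CR: an unconditional blr. */
	printf("blr = 0x%08x\n", encode_bclr(20, 0)); /* 0x4e800020 */
	return 0;
}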
/linux-4.1.27/include/trace/events/
Dhost1x.h84 TP_PROTO(const char *name, struct host1x_bo *bo,
87 TP_ARGS(name, bo, words, offset, cmdbuf),
91 __field(struct host1x_bo *, bo)
105 __entry->bo = bo;
111 __entry->name, __entry->bo,
226 TP_PROTO(struct host1x_bo *bo, u32 offset, u32 syncpt_id, u32 thresh,
229 TP_ARGS(bo, offset, syncpt_id, thresh, min),
232 __field(struct host1x_bo *, bo)
240 __entry->bo = bo;
248 __entry->bo, __entry->offset,
/linux-4.1.27/drivers/gpu/drm/omapdrm/
Domap_fb.c79 struct drm_gem_object *bo; member
100 omap_fb->planes[0].bo, handle); in omap_framebuffer_create_handle()
114 if (plane->bo) in omap_framebuffer_destroy()
115 drm_gem_object_unreference_unlocked(plane->bo); in omap_framebuffer_destroy()
168 if (omap_gem_flags(plane->bo) & OMAP_BO_TILED) { in omap_framebuffer_update_scanout()
206 omap_gem_rotated_paddr(plane->bo, orient, x, y, &info->paddr); in omap_framebuffer_update_scanout()
208 info->screen_width = omap_gem_tiled_stride(plane->bo, orient); in omap_framebuffer_update_scanout()
236 WARN_ON(!(omap_gem_flags(plane->bo) & OMAP_BO_TILED)); in omap_framebuffer_update_scanout()
237 omap_gem_rotated_paddr(plane->bo, orient, in omap_framebuffer_update_scanout()
260 ret = omap_gem_get_paddr(plane->bo, &plane->paddr, true); in omap_framebuffer_pin()
[all …]
Domap_fbdev.c38 struct drm_gem_object *bo; member
55 omap_gem_roll(fbdev->bo, fbi->var.yoffset * npages); in pan_worker()
146 fbdev->bo = omap_gem_new(dev, gsize, OMAP_BO_SCANOUT | OMAP_BO_WC); in omap_fbdev_create()
147 if (!fbdev->bo) { in omap_fbdev_create()
153 fb = omap_framebuffer_init(dev, &mode_cmd, &fbdev->bo); in omap_fbdev_create()
159 drm_gem_object_unreference(fbdev->bo); in omap_fbdev_create()
172 ret = omap_gem_get_paddr(fbdev->bo, &paddr, true); in omap_fbdev_create()
212 fbi->screen_base = omap_gem_vaddr(fbdev->bo); in omap_fbdev_create()
213 fbi->screen_size = fbdev->bo->size; in omap_fbdev_create()
215 fbi->fix.smem_len = fbdev->bo->size; in omap_fbdev_create()
[all …]
Domap_crtc.c582 struct drm_gem_object *bo; in page_flip_worker() local
591 bo = omap_framebuffer_bo(crtc->primary->fb, 0); in page_flip_worker()
592 drm_gem_object_unreference_unlocked(bo); in page_flip_worker()
613 struct drm_gem_object *bo; in omap_crtc_page_flip_locked() local
638 bo = omap_framebuffer_bo(fb, 0); in omap_crtc_page_flip_locked()
639 drm_gem_object_reference(bo); in omap_crtc_page_flip_locked()
641 omap_gem_op_async(bo, OMAP_GEM_READ, page_flip_cb, crtc); in omap_crtc_page_flip_locked()
/linux-4.1.27/drivers/gpu/drm/nouveau/nvkm/engine/gr/
Dctxgf108.c743 u32 bo = 0; in gf108_grctx_generate_attrib() local
744 u32 ao = bo + impl->attrib_nr_max * priv->tpc_total; in gf108_grctx_generate_attrib()
758 mmio_skip(info, o + 0x20, (t << 28) | (b << 16) | ++bo); in gf108_grctx_generate_attrib()
759 mmio_wr32(info, o + 0x20, (t << 28) | (b << 16) | --bo); in gf108_grctx_generate_attrib()
760 bo += impl->attrib_nr_max; in gf108_grctx_generate_attrib()
Dctxgf117.c195 u32 bo = 0; in gf117_grctx_generate_attrib() local
196 u32 ao = bo + impl->attrib_nr_max * priv->tpc_total; in gf117_grctx_generate_attrib()
210 mmio_skip(info, o + 0xc0, (t << 28) | (b << 16) | ++bo); in gf117_grctx_generate_attrib()
211 mmio_wr32(info, o + 0xc0, (t << 28) | (b << 16) | --bo); in gf117_grctx_generate_attrib()
212 bo += impl->attrib_nr_max * priv->ppc_tpc_nr[gpc][ppc]; in gf117_grctx_generate_attrib()
Dctxgm107.c907 u32 bo = 0; in gm107_grctx_generate_attrib() local
908 u32 ao = bo + impl->attrib_nr_max * priv->tpc_total; in gm107_grctx_generate_attrib()
924 mmio_wr32(info, o + 0xf4, bo); in gm107_grctx_generate_attrib()
925 bo += impl->attrib_nr_max * priv->ppc_tpc_nr[gpc][ppc]; in gm107_grctx_generate_attrib()
Dctxgf100.c1064 u32 bo = 0; in gf100_grctx_generate_attrib() local
1073 mmio_skip(info, o, (attrib << 16) | ++bo); in gf100_grctx_generate_attrib()
1074 mmio_wr32(info, o, (attrib << 16) | --bo); in gf100_grctx_generate_attrib()
1075 bo += impl->attrib_nr_max; in gf100_grctx_generate_attrib()
/linux-4.1.27/drivers/gpu/host1x/hw/
Dchannel_hw.c32 static void trace_write_gather(struct host1x_cdma *cdma, struct host1x_bo *bo, in trace_write_gather() argument
39 mem = host1x_bo_mmap(bo); in trace_write_gather()
51 trace_host1x_cdma_push_gather(dev_name(dev), bo, in trace_write_gather()
56 host1x_bo_munmap(bo, mem); in trace_write_gather()
69 trace_write_gather(cdma, g->bo, g->offset, op1 & 0xffff); in submit_gathers()
Ddebug_hw.c159 mapped = host1x_bo_mmap(g->bo); in show_channel_gathers()
173 host1x_bo_munmap(g->bo, mapped); in show_channel_gathers()
/linux-4.1.27/kernel/trace/
Dtrace_probe.c473 unsigned long bw, bo; in __parse_bitfield_probe_arg() local
492 bo = simple_strtoul(bf, &tail, 0); in __parse_bitfield_probe_arg()
497 bprm->hi_shift = BYTES_TO_BITS(t->size) - (bw + bo); in __parse_bitfield_probe_arg()
498 bprm->low_shift = bprm->hi_shift + bo; in __parse_bitfield_probe_arg()
500 return (BYTES_TO_BITS(t->size) < (bw + bo)) ? -EINVAL : 0; in __parse_bitfield_probe_arg()
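__parse_bitfield_probe_arg() reduces a bitfield of width bw starting bo bits above the LSB to a shift pair: shifting left by hi_shift discards the bits above the field, then shifting right by low_shift (= hi_shift + bo) discards the bits below it. A worked sketch with hypothetical values (a 3-bit field at offset 5 in a 4-byte value):

#include <stdint.h>
#include <stdio.h>

int main(void)
{
	unsigned int bw = 3, bo = 5, size = 4;	/* hypothetical spec */
	unsigned int hi_shift = size * 8 - (bw + bo);	/* 24 */
	unsigned int low_shift = hi_shift + bo;		/* 29 */
	uint32_t val = 0xdeadbeef;

	/* Bits 5-7 of 0xdeadbeef are 0b111, so this prints 7. */
	printf("field = %u\n", (val << hi_shift) >> low_shift);
	return 0;
}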
/linux-4.1.27/scripts/coccinelle/misc/
Difaddr.cocci1 /// the address of a variable or field is non-zero is likely always to bo
/linux-4.1.27/net/ieee802154/
Dtrace.h37 #define BOOL_TO_STR(bo) (bo) ? "true" : "false" argument
/linux-4.1.27/drivers/usb/wusbcore/
Dcrypto.c128 u8 *bo = _bo; in bytewise_xor() local
132 bo[itr] = bi1[itr] ^ bi2[itr]; in bytewise_xor()
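bytewise_xor() above XORs two equal-length buffers into a third, one byte at a time. A minimal reconstruction from the lines shown, with the signature assumed from context:

#include <stddef.h>
#include <stdint.h>

static void bytewise_xor(void *_bo, const void *_bi1,
			 const void *_bi2, size_t size)
{
	uint8_t *bo = _bo;
	const uint8_t *bi1 = _bi1, *bi2 = _bi2;
	size_t itr;

	for (itr = 0; itr < size; itr++)
		bo[itr] = bi1[itr] ^ bi2[itr];
}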
/linux-4.1.27/arch/mips/include/asm/xtalk/
Dxwidget.h133 unsigned bo:1; member
/linux-4.1.27/drivers/gpu/drm/nouveau/dispnv04/
Doverlay.c139 nvif_wr32(dev, NV_PVIDEO_OFFSET_BUFF(flip), nv_fb->nvbo->bo.offset); in nv10_update_plane()
159 nv_fb->nvbo->bo.offset + fb->offsets[1]); in nv10_update_plane()
388 nv_fb->nvbo->bo.offset); in nv04_update_plane()
Dcrtc.c852 nv_crtc->fb.offset = fb->nvbo->bo.offset; in nv04_crtc_do_mode_set_base()
1020 nv_crtc->cursor.offset = nv_crtc->cursor.nvbo->bo.offset; in nv04_crtc_cursor_set()
/linux-4.1.27/drivers/gpu/ipu-v3/
Dipu-cpmem.c295 int bpp = 0, npb = 0, ro, go, bo, to; in ipu_cpmem_set_format_rgb() local
299 bo = rgb->bits_per_pixel - rgb->blue.length - rgb->blue.offset; in ipu_cpmem_set_format_rgb()
307 ipu_ch_param_write_field(ch, IPU_FIELD_OFS2, bo); in ipu_cpmem_set_format_rgb()
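ipu_cpmem_set_format_rgb() converts each component's (length, offset) description, counted from the LSB, into the offset-from-the-top form the IPU channel parameters expect: bits_per_pixel - length - offset. A worked sketch for a hypothetical RGB565 layout (R in bits 11-15, G in 5-10, B in 0-4):

#include <stdio.h>

struct comp { int length, offset; };	/* fb_bitfield stand-in */

int main(void)
{
	int bpp = 16;
	struct comp red = {5, 11}, green = {6, 5}, blue = {5, 0};
	int ro = bpp - red.length - red.offset;		/* 0  */
	int go = bpp - green.length - green.offset;	/* 5  */
	int bo = bpp - blue.length - blue.offset;	/* 11 */

	printf("ro=%d go=%d bo=%d\n", ro, go, bo);
	return 0;
}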
/linux-4.1.27/drivers/gpu/drm/gma500/
Dpsb_intel_drv.h97 size_t(*bo_offset) (struct drm_device *dev, void *bo);
/linux-4.1.27/drivers/media/usb/dvb-usb/
Dtechnisat-usb2.c407 u8 bo = offset & 0xff; in technisat_usb2_eeprom_lrc_read() local
411 .buf = &bo, in technisat_usb2_eeprom_lrc_read()
/linux-4.1.27/arch/powerpc/lib/
Dsstep.c65 unsigned int bo = (instr >> 21) & 0x1f; in branch_taken() local
68 if ((bo & 4) == 0) { in branch_taken()
71 if (((bo >> 1) & 1) ^ (regs->ctr == 0)) in branch_taken()
74 if ((bo & 0x10) == 0) { in branch_taken()
77 if (((regs->ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1)) in branch_taken()
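branch_taken() above decodes the BO field of a PowerPC conditional branch: if bit 0x04 is clear, the CTR is decremented and compared with zero (bit 0x02 selects which outcome takes the branch); if bit 0x10 is clear, the CR bit selected by BI must equal bit 0x08 of BO. A self-contained sketch of that logic, with struct regs as a simplified stand-in for pt_regs:

#include <stdbool.h>
#include <stdint.h>

struct regs { uint64_t ctr; uint32_t ccr; };	/* pt_regs stand-in */

static bool branch_taken(uint32_t instr, struct regs *regs)
{
	unsigned int bo = (instr >> 21) & 0x1f;
	unsigned int bi;

	if ((bo & 4) == 0) {
		/* decrement counter, test against zero */
		--regs->ctr;
		if ((((bo >> 1) & 1) ^ (regs->ctr == 0)) != 0)
			return false;
	}
	if ((bo & 0x10) == 0) {
		/* test the CR bit selected by BI */
		bi = (instr >> 16) & 0x1f;
		if (((regs->ccr >> (31 - bi)) & 1) != ((bo >> 3) & 1))
			return false;
	}
	return true;
}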
/linux-4.1.27/drivers/gpu/drm/msm/adreno/
Dadreno_gpu.c60 ret = msm_gem_get_iova(gpu->rb->bo, gpu->id, &gpu->rb_iova); in adreno_hw_init()
/linux-4.1.27/arch/mips/include/asm/sn/sn0/
Dhubio.h882 bo: 1, /* 31: barrier op set in xtalk rqst*/ member
/linux-4.1.27/arch/powerpc/xmon/
Dppc-opc.c1559 #define BBO(op, bo, aa, lk) (B ((op), (aa), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21)) argument
1573 #define BBOCB(op, bo, cb, aa, lk) \ argument
1574 (BBO ((op), (bo), (aa), (lk)) | ((((unsigned long)(cb)) & 0x3) << 16))
1752 #define XLO(op, bo, xop, lk) \ argument
1753 (XLLK ((op), (xop), (lk)) | ((((unsigned long)(bo)) & 0x1f) << 21))
1763 #define XLOCB(op, bo, cb, xop, lk) \ argument
1764 (XLO ((op), (bo), (xop), (lk)) | ((((unsigned long)(cb)) & 3) << 16))
/linux-4.1.27/net/wireless/
Dtrace.h188 #define BOOL_TO_STR(bo) (bo) ? "true" : "false" argument
/linux-4.1.27/drivers/usb/host/
Doxu210hp-hcd.c3696 static const char * const bo[] = { in oxu_verify_id() local
3711 bo[(id & OXU_BO_MASK) >> OXU_BO_SHIFT], in oxu_verify_id()