
Searched refs:gem (Results 1 – 63 of 63) sorted by relevance

/linux-4.1.27/drivers/gpu/drm/tegra/
gem.c
31 struct drm_device *drm = obj->gem.dev; in tegra_bo_put()
34 drm_gem_object_unreference(&obj->gem); in tegra_bo_put()
75 struct drm_device *drm = obj->gem.dev; in tegra_bo_get()
78 drm_gem_object_reference(&obj->gem); in tegra_bo_get()
107 err = drm_mm_insert_node_generic(&tegra->mm, bo->mm, bo->gem.size, in tegra_bo_iommu_map()
160 err = drm_gem_object_init(drm, &bo->gem, size); in tegra_bo_alloc_object()
164 err = drm_gem_create_mmap_offset(&bo->gem); in tegra_bo_alloc_object()
171 drm_gem_object_release(&bo->gem); in tegra_bo_alloc_object()
180 drm_gem_put_pages(&bo->gem, bo->pages, true, true); in tegra_bo_free()
184 dma_free_writecombine(drm->dev, bo->gem.size, bo->vaddr, in tegra_bo_free()
[all …]
gem.h
34 struct drm_gem_object gem; member
50 static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem) in to_tegra_bo() argument
52 return container_of(gem, struct tegra_bo, gem); in to_tegra_bo()
62 void tegra_bo_free_object(struct drm_gem_object *gem);
73 struct drm_gem_object *gem,
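
The to_tegra_bo() helper above is the embed-and-recover pattern that recurs throughout these results (nouveau, gma500, bochs, cirrus, mgag200 and ast all do the same): the driver's buffer object embeds a struct drm_gem_object, and container_of() recovers the outer object from a pointer to the embedded member. A minimal, self-contained sketch of the idiom, using made-up fake_bo/fake_gem_object types rather than the kernel structures:

    /* Stand-alone illustration of the container_of() idiom used by
     * to_tegra_bo(); fake_gem_object, fake_bo and to_fake_bo are
     * hypothetical names, not kernel symbols. */
    #include <stdio.h>
    #include <stddef.h>

    #define container_of(ptr, type, member) \
            ((type *)((char *)(ptr) - offsetof(type, member)))

    struct fake_gem_object { size_t size; };

    struct fake_bo {
            int id;
            struct fake_gem_object gem;     /* embedded, like tegra_bo.gem */
    };

    static struct fake_bo *to_fake_bo(struct fake_gem_object *gem)
    {
            return container_of(gem, struct fake_bo, gem);
    }

    int main(void)
    {
            struct fake_bo bo = { .id = 42, .gem = { .size = 4096 } };

            /* Given only the embedded gem pointer, recover the wrapper. */
            printf("id=%d size=%zu\n", to_fake_bo(&bo.gem)->id, bo.gem.size);
            return 0;
    }
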
drm.c
264 struct drm_gem_object *gem; in host1x_bo_lookup() local
267 gem = drm_gem_object_lookup(drm, file, handle); in host1x_bo_lookup()
268 if (!gem) in host1x_bo_lookup()
272 drm_gem_object_unreference(gem); in host1x_bo_lookup()
275 bo = to_tegra_bo(gem); in host1x_bo_lookup()
456 struct drm_gem_object *gem; in tegra_gem_mmap() local
459 gem = drm_gem_object_lookup(drm, file, args->handle); in tegra_gem_mmap()
460 if (!gem) in tegra_gem_mmap()
463 bo = to_tegra_bo(gem); in tegra_gem_mmap()
465 args->offset = drm_vma_node_offset_addr(&bo->gem.vma_node); in tegra_gem_mmap()
[all …]
fb.c
72 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fb_destroy()
86 return drm_gem_handle_create(file, &fb->planes[0]->gem, handle); in tegra_fb_create_handle()
138 struct drm_gem_object *gem; in tegra_fb_create() local
150 gem = drm_gem_object_lookup(drm, file, cmd->handles[i]); in tegra_fb_create()
151 if (!gem) { in tegra_fb_create()
161 if (gem->size < size) { in tegra_fb_create()
166 planes[i] = to_tegra_bo(gem); in tegra_fb_create()
179 drm_gem_object_unreference_unlocked(&planes[i]->gem); in tegra_fb_create()
230 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fbdev_probe()
239 drm_gem_object_unreference_unlocked(&bo->gem); in tegra_fbdev_probe()
Makefile
5 gem.o \
/linux-4.1.27/drivers/gpu/drm/nouveau/
nouveau_gem.c
36 nouveau_gem_object_del(struct drm_gem_object *gem) in nouveau_gem_object_del() argument
38 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_del()
48 if (gem->import_attach) in nouveau_gem_object_del()
49 drm_prime_gem_destroy(gem, nvbo->bo.sg); in nouveau_gem_object_del()
51 drm_gem_object_release(gem); in nouveau_gem_object_del()
54 gem->filp = NULL; in nouveau_gem_object_del()
62 nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv) in nouveau_gem_object_open() argument
65 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_open()
145 nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv) in nouveau_gem_object_close() argument
148 struct nouveau_bo *nvbo = nouveau_gem_object(gem); in nouveau_gem_object_close()
[all …]
nouveau_gem.h
13 nouveau_gem_object(struct drm_gem_object *gem) in nouveau_gem_object() argument
15 return gem ? container_of(gem, struct nouveau_bo, gem) : NULL; in nouveau_gem_object()
nouveau_display.c
222 drm_gem_object_unreference_unlocked(&fb->nvbo->gem); in nouveau_user_framebuffer_destroy()
235 return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle); in nouveau_user_framebuffer_create_handle()
275 struct drm_gem_object *gem; in nouveau_user_framebuffer_create() local
278 gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]); in nouveau_user_framebuffer_create()
279 if (!gem) in nouveau_user_framebuffer_create()
286 ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem)); in nouveau_user_framebuffer_create()
295 drm_gem_object_unreference(gem); in nouveau_user_framebuffer_create()
889 ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle); in nouveau_display_dumb_create()
890 drm_gem_object_unreference_unlocked(&bo->gem); in nouveau_display_dumb_create()
899 struct drm_gem_object *gem; in nouveau_display_dumb_map_offset() local
[all …]
nouveau_ttm.c
393 drm->gem.vram_available = drm->device.info.ram_user; in nouveau_ttm_init()
396 drm->gem.vram_available >> PAGE_SHIFT); in nouveau_ttm_init()
407 drm->gem.gart_available = nvxx_mmu(&drm->device)->limit; in nouveau_ttm_init()
409 drm->gem.gart_available = drm->agp.size; in nouveau_ttm_init()
413 drm->gem.gart_available >> PAGE_SHIFT); in nouveau_ttm_init()
419 NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20)); in nouveau_ttm_init()
420 NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20)); in nouveau_ttm_init()
nouveau_prime.c
81 ret = drm_gem_object_init(dev, &nvbo->gem, nvbo->bo.mem.size); in nouveau_gem_prime_import_sg_table()
87 return &nvbo->gem; in nouveau_gem_prime_import_sg_table()
nouveau_abi16.c
127 drm_gem_object_unreference_unlocked(&chan->ntfy->gem); in nouveau_abi16_chan_fini()
200 getparam->value = drm->gem.vram_available; in nouveau_abi16_ioctl_getparam()
203 getparam->value = drm->gem.gart_available; in nouveau_abi16_ioctl_getparam()
322 ret = drm_gem_handle_create(file_priv, &chan->ntfy->gem, in nouveau_abi16_ioctl_channel_alloc()
nouveau_bo.h
36 struct drm_gem_object gem; member
nouveau_drm.h
139 } gem; member
nouveau_bo.c
140 if (unlikely(nvbo->gem.filp)) in nouveau_bo_del_ttm()
370 drm->gem.vram_available -= bo->mem.size; in nouveau_bo_pin()
373 drm->gem.gart_available -= bo->mem.size; in nouveau_bo_pin()
408 drm->gem.vram_available += bo->mem.size; in nouveau_bo_unpin()
411 drm->gem.gart_available += bo->mem.size; in nouveau_bo_unpin()
1351 return drm_vma_node_verify_access(&nvbo->gem.vma_node, filp); in nouveau_bo_verify_access()
nv50_display.c
1283 struct drm_gem_object *gem = NULL; in nv50_crtc_cursor_set() local
1291 gem = drm_gem_object_lookup(dev, file_priv, handle); in nv50_crtc_cursor_set()
1292 if (unlikely(!gem)) in nv50_crtc_cursor_set()
1294 nvbo = nouveau_gem_object(gem); in nv50_crtc_cursor_set()
1304 drm_gem_object_unreference_unlocked(gem); in nv50_crtc_cursor_set()
nouveau_fbcon.c
491 drm_gem_object_unreference_unlocked(&nouveau_fb->nvbo->gem); in nouveau_fbcon_destroy()
/linux-4.1.27/drivers/gpu/drm/gma500/
gem.c
34 struct gtt_range *gtt = container_of(obj, struct gtt_range, gem); in psb_gem_free_object()
115 if (drm_gem_object_init(dev, &r->gem, size) != 0) { in psb_gem_create()
122 mapping_set_gfp_mask(r->gem.filp->f_mapping, GFP_KERNEL | __GFP_DMA32); in psb_gem_create()
124 ret = drm_gem_handle_create(file, &r->gem, &handle); in psb_gem_create()
127 &r->gem, size); in psb_gem_create()
128 drm_gem_object_release(&r->gem); in psb_gem_create()
133 drm_gem_object_unreference_unlocked(&r->gem); in psb_gem_create()
188 r = container_of(obj, struct gtt_range, gem); /* Get the gtt range */ in psb_gem_fault()
gtt.c
209 pages = drm_gem_get_pages(&gt->gem); in psb_gtt_attach_pages()
213 gt->npage = gt->gem.size / PAGE_SIZE; in psb_gtt_attach_pages()
230 drm_gem_put_pages(&gt->gem, gt->pages, true, false); in psb_gtt_detach_pages()
247 struct drm_device *dev = gt->gem.dev; in psb_gtt_pin()
285 struct drm_device *dev = gt->gem.dev; in psb_gtt_unpin()
359 gt->gem.dev = dev; in psb_gtt_alloc_range()
framebuffer.c
324 drm_gem_private_object_init(dev, &backing->gem, aligned_size); in psbfb_alloc()
485 drm_gem_object_unreference(&backing->gem); in psbfb_create()
516 r = container_of(obj, struct gtt_range, gem); in psb_user_framebuffer_create()
588 drm_gem_object_unreference(&psbfb->gtt->gem); in psb_fbdev_destroy()
668 return drm_gem_handle_create(file_priv, &r->gem, handle); in psb_user_framebuffer_create_handle()
686 drm_gem_object_unreference_unlocked(&r->gem); in psb_user_framebuffer_destroy()
gtt.h
47 struct drm_gem_object gem; /* GEM high level stuff */ member
Makefile
10 gem.o \
gma_display.c
363 struct gtt_range, gem); in gma_crtc_cursor_set()
392 gt = container_of(obj, struct gtt_range, gem); in gma_crtc_cursor_set()
442 gt = container_of(gma_crtc->cursor_obj, struct gtt_range, gem); in gma_crtc_cursor_set()
/linux-4.1.27/drivers/net/ethernet/sun/
sungem.c
118 static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg) in __sungem_phy_read()
146 struct gem *gp = netdev_priv(dev); in _sungem_phy_read()
150 static inline u16 sungem_phy_read(struct gem *gp, int reg) in sungem_phy_read()
155 static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val) in __sungem_phy_write()
179 struct gem *gp = netdev_priv(dev); in _sungem_phy_write()
183 static inline void sungem_phy_write(struct gem *gp, int reg, u16 val) in sungem_phy_write()
188 static inline void gem_enable_ints(struct gem *gp) in gem_enable_ints()
194 static inline void gem_disable_ints(struct gem *gp) in gem_disable_ints()
201 static void gem_get_cell(struct gem *gp) in gem_get_cell()
215 static void gem_put_cell(struct gem *gp) in gem_put_cell()
[all …]
sungem.h
975 struct gem { struct
/linux-4.1.27/Documentation/devicetree/bindings/net/
macb.txt
4 - compatible: Should be "cdns,[<chip>-]{macb|gem}"
8 Use "cdns,pc302-gem" for Picochip picoXcell pc302 and later devices based on
9 the Cadence GEM, or the generic form: "cdns,gem".
10 Use "cdns,sama5d3-gem" for the Gigabit IP available on Atmel sama5d3 SoCs.
11 Use "cdns,sama5d4-gem" for the Gigabit IP available on Atmel sama5d4 SoCs.
cdns-emac.txt
6 Use "cdns,zynq-gem" for the Xilinx Zynq-7xxx SoC.
/linux-4.1.27/drivers/gpu/drm/qxl/
qxl_gem.c
62 mutex_lock(&qdev->gem.mutex); in qxl_gem_object_create()
63 list_add_tail(&qbo->list, &qdev->gem.objects); in qxl_gem_object_create()
64 mutex_unlock(&qdev->gem.mutex); in qxl_gem_object_create()
110 INIT_LIST_HEAD(&qdev->gem.objects); in qxl_gem_init()
qxl_object.c
39 mutex_lock(&qdev->gem.mutex); in qxl_ttm_bo_destroy()
41 mutex_unlock(&qdev->gem.mutex); in qxl_ttm_bo_destroy()
274 if (list_empty(&qdev->gem.objects)) in qxl_bo_force_delete()
277 list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) { in qxl_bo_force_delete()
282 mutex_lock(&qdev->gem.mutex); in qxl_bo_force_delete()
284 mutex_unlock(&qdev->gem.mutex); in qxl_bo_force_delete()
qxl_debugfs.c
60 list_for_each_entry(bo, &qdev->gem.objects, list) { in qxl_debugfs_buffers_info()
qxl_kms.c
130 mutex_init(&qdev->gem.mutex); in qxl_device_init()
134 INIT_LIST_HEAD(&qdev->gem.objects); in qxl_device_init()
qxl_drv.h
263 struct qxl_gem gem; member
/linux-4.1.27/drivers/gpu/drm/bochs/
bochs.h
103 struct drm_gem_object gem; member
113 static inline struct bochs_bo *gem_to_bochs_bo(struct drm_gem_object *gem) in gem_to_bochs_bo() argument
115 return container_of(gem, struct bochs_bo, gem); in gem_to_bochs_bo()
bochs_mm.c
79 drm_gem_object_release(&bo->gem); in bochs_bo_ttm_destroy()
131 return drm_vma_node_verify_access(&bochsbo->gem.vma_node, filp); in bochs_bo_verify_access()
363 ret = drm_gem_object_init(dev, &bochsbo->gem, size); in bochs_bo_create()
406 *obj = &bochsbo->gem; in bochs_gem_create()
/linux-4.1.27/drivers/gpu/drm/radeon/
radeon_prime.c
76 mutex_lock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
77 list_add_tail(&bo->list, &rdev->gem.objects); in radeon_gem_prime_import_sg_table()
78 mutex_unlock(&rdev->gem.mutex); in radeon_gem_prime_import_sg_table()
radeon_gem.c
86 mutex_lock(&rdev->gem.mutex); in radeon_gem_object_create()
87 list_add_tail(&robj->list, &rdev->gem.objects); in radeon_gem_object_create()
88 mutex_unlock(&rdev->gem.mutex); in radeon_gem_object_create()
128 INIT_LIST_HEAD(&rdev->gem.objects); in radeon_gem_init()
771 mutex_lock(&rdev->gem.mutex); in radeon_debugfs_gem_info()
772 list_for_each_entry(rbo, &rdev->gem.objects, list) { in radeon_debugfs_gem_info()
794 mutex_unlock(&rdev->gem.mutex); in radeon_debugfs_gem_info()
radeon_object.c
80 mutex_lock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
82 mutex_unlock(&bo->rdev->gem.mutex); in radeon_ttm_bo_destroy()
424 if (list_empty(&rdev->gem.objects)) { in radeon_bo_force_delete()
428 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_bo_force_delete()
433 mutex_lock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
435 mutex_unlock(&bo->rdev->gem.mutex); in radeon_bo_force_delete()
radeon_pm.c
148 if (list_empty(&rdev->gem.objects)) in radeon_unmap_vram_bos()
151 list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) { in radeon_unmap_vram_bos()
radeon_device.c
1305 mutex_init(&rdev->gem.mutex); in radeon_device_init()
radeon.h
2386 struct radeon_gem gem; member
/linux-4.1.27/drivers/gpu/drm/shmobile/
shmob_drm_plane.c
49 struct drm_gem_cma_object *gem; in shmob_drm_plane_compute_base() local
53 gem = drm_fb_cma_get_gem_obj(fb, 0); in shmob_drm_plane_compute_base()
54 splane->dma[0] = gem->paddr + fb->offsets[0] in shmob_drm_plane_compute_base()
59 gem = drm_fb_cma_get_gem_obj(fb, 1); in shmob_drm_plane_compute_base()
60 splane->dma[1] = gem->paddr + fb->offsets[1] in shmob_drm_plane_compute_base()
shmob_drm_crtc.c
309 struct drm_gem_cma_object *gem; in shmob_drm_crtc_compute_base() local
313 gem = drm_fb_cma_get_gem_obj(fb, 0); in shmob_drm_crtc_compute_base()
314 scrtc->dma[0] = gem->paddr + fb->offsets[0] in shmob_drm_crtc_compute_base()
319 gem = drm_fb_cma_get_gem_obj(fb, 1); in shmob_drm_crtc_compute_base()
320 scrtc->dma[1] = gem->paddr + fb->offsets[1] in shmob_drm_crtc_compute_base()
/linux-4.1.27/drivers/gpu/drm/rcar-du/
rcar_du_plane.c
54 struct drm_gem_cma_object *gem; in rcar_du_plane_setup_fb() local
90 gem = drm_fb_cma_get_gem_obj(fb, 0); in rcar_du_plane_setup_fb()
91 rcar_du_plane_write(rgrp, index, PnDSA0R, gem->paddr + fb->offsets[0]); in rcar_du_plane_setup_fb()
102 gem = drm_fb_cma_get_gem_obj(fb, 1); in rcar_du_plane_setup_fb()
104 gem->paddr + fb->offsets[1]); in rcar_du_plane_setup_fb()
/linux-4.1.27/arch/arm64/boot/dts/xilinx/
zynqmp.dtsi
176 compatible = "cdns,gem";
188 compatible = "cdns,gem";
200 compatible = "cdns,gem";
212 compatible = "cdns,gem";
/linux-4.1.27/drivers/gpu/drm/cirrus/
cirrus_drv.h
167 struct drm_gem_object gem; member
171 #define gem_to_cirrus_bo(gobj) container_of((gobj), struct cirrus_bo, gem)
cirrus_ttm.c
101 drm_gem_object_release(&bo->gem); in cirrus_bo_ttm_destroy()
153 return drm_vma_node_verify_access(&cirrusbo->gem.vma_node, filp); in cirrus_bo_verify_access()
330 ret = drm_gem_object_init(dev, &cirrusbo->gem, size); in cirrus_bo_create()
cirrus_main.c
235 *obj = &cirrusbo->gem; in cirrus_gem_create()
/linux-4.1.27/drivers/gpu/drm/mgag200/
mgag200_ttm.c
101 drm_gem_object_release(&bo->gem); in mgag200_bo_ttm_destroy()
153 return drm_vma_node_verify_access(&mgabo->gem.vma_node, filp); in mgag200_bo_verify_access()
326 ret = drm_gem_object_init(dev, &mgabo->gem, size); in mgag200_bo_create()
mgag200_drv.h
226 struct drm_gem_object gem; member
230 #define gem_to_mga_bo(gobj) container_of((gobj), struct mgag200_bo, gem)
mgag200_main.c
287 *obj = &astbo->gem; in mgag200_gem_create()
/linux-4.1.27/drivers/gpu/drm/ast/
ast_ttm.c
101 drm_gem_object_release(&bo->gem); in ast_bo_ttm_destroy()
153 return drm_vma_node_verify_access(&astbo->gem.vma_node, filp); in ast_bo_verify_access()
326 ret = drm_gem_object_init(dev, &astbo->gem, size); in ast_bo_create()
ast_drv.h
323 struct drm_gem_object gem; member
327 #define gem_to_ast_bo(gobj) container_of((gobj), struct ast_bo, gem)
ast_main.c
514 *obj = &astbo->gem; in ast_gem_create()
/linux-4.1.27/arch/arm/boot/dts/
picoxcell-pc3x2.dtsi
54 emac: gem@30000 {
55 compatible = "cadence,gem";
picoxcell-pc3x3.dtsi
150 emac: gem@30000 {
151 compatible = "cadence,gem";
zynq-7000.dtsi
196 compatible = "cdns,zynq-gem";
207 compatible = "cdns,zynq-gem";
sama5d3_gmac.dtsi
77 compatible = "atmel,sama5d3-gem";
sama5d4.dtsi
944 compatible = "atmel,sama5d4-gem";
/linux-4.1.27/drivers/gpu/drm/tilcdc/
tilcdc_crtc.c
89 struct drm_gem_cma_object *gem; in update_scanout() local
93 gem = drm_fb_cma_get_gem_obj(fb, 0); in update_scanout()
95 tilcdc_crtc->start = gem->paddr + fb->offsets[0] + in update_scanout()
/linux-4.1.27/drivers/gpu/drm/atmel-hlcdc/
atmel_hlcdc_layer.c
460 struct drm_gem_cma_object *gem; in atmel_hlcdc_layer_update_set_fb() local
463 gem = drm_fb_cma_get_gem_obj(fb, i); in atmel_hlcdc_layer_update_set_fb()
464 dscr->addr = gem->paddr + offsets[i]; in atmel_hlcdc_layer_update_set_fb()
/linux-4.1.27/drivers/gpu/drm/nouveau/dispnv04/
crtc.c
994 struct drm_gem_object *gem; in nv04_crtc_cursor_set() local
1005 gem = drm_gem_object_lookup(dev, file_priv, buffer_handle); in nv04_crtc_cursor_set()
1006 if (!gem) in nv04_crtc_cursor_set()
1008 cursor = nouveau_gem_object(gem); in nv04_crtc_cursor_set()
1024 drm_gem_object_unreference_unlocked(gem); in nv04_crtc_cursor_set()
/linux-4.1.27/drivers/net/ethernet/cadence/
macb.h
804 struct gem_stats gem; member
macb.c
1061 bp->hw_stats.gem.rx_overruns++; in macb_interrupt()
1871 u32 *p = &bp->hw_stats.gem.tx_octets_31_0; in gem_update_stats()
1891 struct gem_stats *hwstat = &bp->hw_stats.gem; in gem_get_stats()
/linux-4.1.27/Documentation/
dma-buf-sharing.txt
457 callback. In the specific case of a gem driver the exporter could use the
458 shmem file already provided by gem (and set vm_pgoff = 0). Exporters can then
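
The dma-buf-sharing.txt excerpt above suggests that a GEM-based exporter can satisfy the dma-buf mmap callback by reusing the shmem file GEM already attached to the object (the gem.filp field visible in the nouveau and gma500 hits) and mapping from offset zero. A minimal sketch of that idea, assuming the 4.1-era structures quoted in these results; example_gem_dmabuf_mmap and the exact delegation steps are illustrative, not taken from any in-tree driver:

    /* Hedged sketch: delegate the dma-buf mmap to the GEM object's shmem
     * file and map from offset 0, as the excerpt suggests.  Assumes the
     * exporter stored its drm_gem_object in dma_buf->priv. */
    #include <linux/dma-buf.h>
    #include <linux/file.h>
    #include <linux/fs.h>
    #include <drm/drmP.h>

    static int example_gem_dmabuf_mmap(struct dma_buf *dma_buf,
                                       struct vm_area_struct *vma)
    {
            struct drm_gem_object *obj = dma_buf->priv;
            int ret;

            if (!obj->filp || !obj->filp->f_op->mmap)
                    return -ENODEV;

            /* Map from the start of the backing shmem file. */
            vma->vm_pgoff = 0;

            ret = obj->filp->f_op->mmap(obj->filp, vma);
            if (ret)
                    return ret;

            /* Back the VMA with the shmem file, not the dma-buf file. */
            fput(vma->vm_file);
            vma->vm_file = get_file(obj->filp);

            return 0;
    }
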