/linux-4.1.27/drivers/gpu/drm/tegra/

D | gem.c
      31  struct drm_device *drm = obj->gem.dev;  in tegra_bo_put()
      34  drm_gem_object_unreference(&obj->gem);  in tegra_bo_put()
      75  struct drm_device *drm = obj->gem.dev;  in tegra_bo_get()
      78  drm_gem_object_reference(&obj->gem);  in tegra_bo_get()
     107  err = drm_mm_insert_node_generic(&tegra->mm, bo->mm, bo->gem.size,  in tegra_bo_iommu_map()
     160  err = drm_gem_object_init(drm, &bo->gem, size);  in tegra_bo_alloc_object()
     164  err = drm_gem_create_mmap_offset(&bo->gem);  in tegra_bo_alloc_object()
     171  drm_gem_object_release(&bo->gem);  in tegra_bo_alloc_object()
     180  drm_gem_put_pages(&bo->gem, bo->pages, true, true);  in tegra_bo_free()
     184  dma_free_writecombine(drm->dev, bo->gem.size, bo->vaddr,  in tegra_bo_free()
     [all …]

D | gem.h
      34  struct drm_gem_object gem;  member
      50  static inline struct tegra_bo *to_tegra_bo(struct drm_gem_object *gem)  in to_tegra_bo() argument
      52  return container_of(gem, struct tegra_bo, gem);  in to_tegra_bo()
      62  void tegra_bo_free_object(struct drm_gem_object *gem);
      73  struct drm_gem_object *gem,

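Note: the gem.h hits above, together with the nouveau_gem.h, gtt.h, bochs.h, cirrus_drv.h, mgag200_drv.h and ast_drv.h entries further down, all use the same idiom: the driver-private buffer object embeds a struct drm_gem_object and recovers the wrapper with container_of(). A minimal sketch of that pattern with assumed names (struct my_bo / to_my_bo are illustrative, not taken from any of these drivers):

#include <linux/kernel.h>	/* container_of() */
#include <drm/drm_gem.h>	/* struct drm_gem_object */

/* Driver-private buffer object wrapping the core GEM object. */
struct my_bo {
	struct drm_gem_object gem;	/* embedded, not a pointer */
	void *vaddr;			/* driver-specific state would follow */
};

/* Map the embedded GEM object back to its wrapper. */
static inline struct my_bo *to_my_bo(struct drm_gem_object *gem)
{
	return container_of(gem, struct my_bo, gem);
}

Embedding the GEM object (rather than pointing to it) is what makes the container_of() upcast valid: the member sits at a fixed offset inside the wrapper, so the GEM core can hand out &bo->gem and the driver can always get back to bo.
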
D | drm.c
     264  struct drm_gem_object *gem;  in host1x_bo_lookup() local
     267  gem = drm_gem_object_lookup(drm, file, handle);  in host1x_bo_lookup()
     268  if (!gem)  in host1x_bo_lookup()
     272  drm_gem_object_unreference(gem);  in host1x_bo_lookup()
     275  bo = to_tegra_bo(gem);  in host1x_bo_lookup()
     456  struct drm_gem_object *gem;  in tegra_gem_mmap() local
     459  gem = drm_gem_object_lookup(drm, file, args->handle);  in tegra_gem_mmap()
     460  if (!gem)  in tegra_gem_mmap()
     463  bo = to_tegra_bo(gem);  in tegra_gem_mmap()
     465  args->offset = drm_vma_node_offset_addr(&bo->gem.vma_node);  in tegra_gem_mmap()
     [all …]

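Note: the drm.c hits show the handle-to-object path used by tegra_gem_mmap(): look up the GEM object by handle, report its fake mmap offset, then drop the lookup reference. A sketch of that pattern against the 4.1-era GEM API; the wrapper function and its parameters are assumptions, only the drm_* calls appear in the listing:

#include <drm/drmP.h>
#include <drm/drm_vma_manager.h>

/* Resolve a userspace handle to the offset userspace must pass to mmap(2). */
static int my_gem_mmap_offset(struct drm_device *drm, struct drm_file *file,
			      u32 handle, u64 *offset)
{
	struct drm_gem_object *gem;

	/* Takes a reference on success; NULL means the handle was bogus. */
	gem = drm_gem_object_lookup(drm, file, handle);
	if (!gem)
		return -EINVAL;

	*offset = drm_vma_node_offset_addr(&gem->vma_node);

	/* Drop the lookup reference without needing struct_mutex. */
	drm_gem_object_unreference_unlocked(gem);
	return 0;
}
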
D | fb.c
      72  drm_gem_object_unreference_unlocked(&bo->gem);  in tegra_fb_destroy()
      86  return drm_gem_handle_create(file, &fb->planes[0]->gem, handle);  in tegra_fb_create_handle()
     138  struct drm_gem_object *gem;  in tegra_fb_create() local
     150  gem = drm_gem_object_lookup(drm, file, cmd->handles[i]);  in tegra_fb_create()
     151  if (!gem) {  in tegra_fb_create()
     161  if (gem->size < size) {  in tegra_fb_create()
     166  planes[i] = to_tegra_bo(gem);  in tegra_fb_create()
     179  drm_gem_object_unreference_unlocked(&planes[i]->gem);  in tegra_fb_create()
     230  drm_gem_object_unreference_unlocked(&bo->gem);  in tegra_fbdev_probe()
     239  drm_gem_object_unreference_unlocked(&bo->gem);  in tegra_fbdev_probe()

D | Makefile
       5  gem.o \

/linux-4.1.27/drivers/gpu/drm/nouveau/

D | nouveau_gem.c
      36  nouveau_gem_object_del(struct drm_gem_object *gem)  in nouveau_gem_object_del() argument
      38  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_del()
      48  if (gem->import_attach)  in nouveau_gem_object_del()
      49  drm_prime_gem_destroy(gem, nvbo->bo.sg);  in nouveau_gem_object_del()
      51  drm_gem_object_release(gem);  in nouveau_gem_object_del()
      54  gem->filp = NULL;  in nouveau_gem_object_del()
      62  nouveau_gem_object_open(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_open() argument
      65  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_open()
     145  nouveau_gem_object_close(struct drm_gem_object *gem, struct drm_file *file_priv)  in nouveau_gem_object_close() argument
     148  struct nouveau_bo *nvbo = nouveau_gem_object(gem);  in nouveau_gem_object_close()
     [all …]

D | nouveau_gem.h
      13  nouveau_gem_object(struct drm_gem_object *gem)  in nouveau_gem_object() argument
      15  return gem ? container_of(gem, struct nouveau_bo, gem) : NULL;  in nouveau_gem_object()

D | nouveau_display.c
     222  drm_gem_object_unreference_unlocked(&fb->nvbo->gem);  in nouveau_user_framebuffer_destroy()
     235  return drm_gem_handle_create(file_priv, &fb->nvbo->gem, handle);  in nouveau_user_framebuffer_create_handle()
     275  struct drm_gem_object *gem;  in nouveau_user_framebuffer_create() local
     278  gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);  in nouveau_user_framebuffer_create()
     279  if (!gem)  in nouveau_user_framebuffer_create()
     286  ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));  in nouveau_user_framebuffer_create()
     295  drm_gem_object_unreference(gem);  in nouveau_user_framebuffer_create()
     889  ret = drm_gem_handle_create(file_priv, &bo->gem, &args->handle);  in nouveau_display_dumb_create()
     890  drm_gem_object_unreference_unlocked(&bo->gem);  in nouveau_display_dumb_create()
     899  struct drm_gem_object *gem;  in nouveau_display_dumb_map_offset() local
     [all …]

D | nouveau_ttm.c
     393  drm->gem.vram_available = drm->device.info.ram_user;  in nouveau_ttm_init()
     396  drm->gem.vram_available >> PAGE_SHIFT);  in nouveau_ttm_init()
     407  drm->gem.gart_available = nvxx_mmu(&drm->device)->limit;  in nouveau_ttm_init()
     409  drm->gem.gart_available = drm->agp.size;  in nouveau_ttm_init()
     413  drm->gem.gart_available >> PAGE_SHIFT);  in nouveau_ttm_init()
     419  NV_INFO(drm, "VRAM: %d MiB\n", (u32)(drm->gem.vram_available >> 20));  in nouveau_ttm_init()
     420  NV_INFO(drm, "GART: %d MiB\n", (u32)(drm->gem.gart_available >> 20));  in nouveau_ttm_init()

D | nouveau_prime.c
      81  ret = drm_gem_object_init(dev, &nvbo->gem, nvbo->bo.mem.size);  in nouveau_gem_prime_import_sg_table()
      87  return &nvbo->gem;  in nouveau_gem_prime_import_sg_table()

D | nouveau_abi16.c
     127  drm_gem_object_unreference_unlocked(&chan->ntfy->gem);  in nouveau_abi16_chan_fini()
     200  getparam->value = drm->gem.vram_available;  in nouveau_abi16_ioctl_getparam()
     203  getparam->value = drm->gem.gart_available;  in nouveau_abi16_ioctl_getparam()
     322  ret = drm_gem_handle_create(file_priv, &chan->ntfy->gem,  in nouveau_abi16_ioctl_channel_alloc()

D | nouveau_bo.h
      36  struct drm_gem_object gem;  member

D | nouveau_drm.h
     139  } gem;  member

D | nouveau_bo.c
     140  if (unlikely(nvbo->gem.filp))  in nouveau_bo_del_ttm()
     370  drm->gem.vram_available -= bo->mem.size;  in nouveau_bo_pin()
     373  drm->gem.gart_available -= bo->mem.size;  in nouveau_bo_pin()
     408  drm->gem.vram_available += bo->mem.size;  in nouveau_bo_unpin()
     411  drm->gem.gart_available += bo->mem.size;  in nouveau_bo_unpin()
    1351  return drm_vma_node_verify_access(&nvbo->gem.vma_node, filp);  in nouveau_bo_verify_access()

D | nv50_display.c
    1283  struct drm_gem_object *gem = NULL;  in nv50_crtc_cursor_set() local
    1291  gem = drm_gem_object_lookup(dev, file_priv, handle);  in nv50_crtc_cursor_set()
    1292  if (unlikely(!gem))  in nv50_crtc_cursor_set()
    1294  nvbo = nouveau_gem_object(gem);  in nv50_crtc_cursor_set()
    1304  drm_gem_object_unreference_unlocked(gem);  in nv50_crtc_cursor_set()

D | nouveau_fbcon.c
     491  drm_gem_object_unreference_unlocked(&nouveau_fb->nvbo->gem);  in nouveau_fbcon_destroy()

/linux-4.1.27/drivers/gpu/drm/gma500/

D | gem.c
      34  struct gtt_range *gtt = container_of(obj, struct gtt_range, gem);  in psb_gem_free_object()
     115  if (drm_gem_object_init(dev, &r->gem, size) != 0) {  in psb_gem_create()
     122  mapping_set_gfp_mask(r->gem.filp->f_mapping, GFP_KERNEL | __GFP_DMA32);  in psb_gem_create()
     124  ret = drm_gem_handle_create(file, &r->gem, &handle);  in psb_gem_create()
     127  &r->gem, size);  in psb_gem_create()
     128  drm_gem_object_release(&r->gem);  in psb_gem_create()
     133  drm_gem_object_unreference_unlocked(&r->gem);  in psb_gem_create()
     188  r = container_of(obj, struct gtt_range, gem); /* Get the gtt range */  in psb_gem_fault()

D | gtt.c
     209  pages = drm_gem_get_pages(&gt->gem);  in psb_gtt_attach_pages()
     213  gt->npage = gt->gem.size / PAGE_SIZE;  in psb_gtt_attach_pages()
     230  drm_gem_put_pages(&gt->gem, gt->pages, true, false);  in psb_gtt_detach_pages()
     247  struct drm_device *dev = gt->gem.dev;  in psb_gtt_pin()
     285  struct drm_device *dev = gt->gem.dev;  in psb_gtt_unpin()
     359  gt->gem.dev = dev;  in psb_gtt_alloc_range()

D | framebuffer.c
     324  drm_gem_private_object_init(dev, &backing->gem, aligned_size);  in psbfb_alloc()
     485  drm_gem_object_unreference(&backing->gem);  in psbfb_create()
     516  r = container_of(obj, struct gtt_range, gem);  in psb_user_framebuffer_create()
     588  drm_gem_object_unreference(&psbfb->gtt->gem);  in psb_fbdev_destroy()
     668  return drm_gem_handle_create(file_priv, &r->gem, handle);  in psb_user_framebuffer_create_handle()
     686  drm_gem_object_unreference_unlocked(&r->gem);  in psb_user_framebuffer_destroy()

D | gtt.h
      47  struct drm_gem_object gem; /* GEM high level stuff */  member

D | Makefile
      10  gem.o \

D | gma_display.c
     363  struct gtt_range, gem);  in gma_crtc_cursor_set()
     392  gt = container_of(obj, struct gtt_range, gem);  in gma_crtc_cursor_set()
     442  gt = container_of(gma_crtc->cursor_obj, struct gtt_range, gem);  in gma_crtc_cursor_set()

/linux-4.1.27/drivers/net/ethernet/sun/

D | sungem.c
     118  static u16 __sungem_phy_read(struct gem *gp, int phy_addr, int reg)  in __sungem_phy_read()
     146  struct gem *gp = netdev_priv(dev);  in _sungem_phy_read()
     150  static inline u16 sungem_phy_read(struct gem *gp, int reg)  in sungem_phy_read()
     155  static void __sungem_phy_write(struct gem *gp, int phy_addr, int reg, u16 val)  in __sungem_phy_write()
     179  struct gem *gp = netdev_priv(dev);  in _sungem_phy_write()
     183  static inline void sungem_phy_write(struct gem *gp, int reg, u16 val)  in sungem_phy_write()
     188  static inline void gem_enable_ints(struct gem *gp)  in gem_enable_ints()
     194  static inline void gem_disable_ints(struct gem *gp)  in gem_disable_ints()
     201  static void gem_get_cell(struct gem *gp)  in gem_get_cell()
     215  static void gem_put_cell(struct gem *gp)  in gem_put_cell()
     [all …]

D | sungem.h
     975  struct gem {  struct

/linux-4.1.27/Documentation/devicetree/bindings/net/

D | macb.txt
       4  - compatible: Should be "cdns,[<chip>-]{macb|gem}"
       8  Use "cdns,pc302-gem" for Picochip picoXcell pc302 and later devices based on
       9  the Cadence GEM, or the generic form: "cdns,gem".
      10  Use "cdns,sama5d3-gem" for the Gigabit IP available on Atmel sama5d3 SoCs.
      11  Use "cdns,sama5d4-gem" for the Gigabit IP available on Atmel sama5d4 SoCs.

D | cdns-emac.txt
       6  Use "cdns,zynq-gem" Xilinx Zynq-7xxx SoC.

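Note: the two binding documents above list the compatible strings that select the Cadence GEM variant of the MAC. For orientation only, this is roughly how a driver would advertise those strings in its OF match table; it is an illustrative sketch built from the strings quoted above, not the actual table from drivers/net/ethernet/cadence/macb.c:

#include <linux/mod_devicetable.h>
#include <linux/module.h>

/* Compatible strings taken from macb.txt / cdns-emac.txt above; .data would
 * normally point at per-variant configuration. */
static const struct of_device_id gem_dt_ids[] = {
	{ .compatible = "cdns,gem" },
	{ .compatible = "cdns,pc302-gem" },
	{ .compatible = "cdns,zynq-gem" },
	{ .compatible = "cdns,sama5d3-gem" },
	{ .compatible = "cdns,sama5d4-gem" },
	{ /* sentinel */ }
};
MODULE_DEVICE_TABLE(of, gem_dt_ids);

The device-tree sources further down this listing also use the "atmel,sama5d3-gem" / "atmel,sama5d4-gem" and legacy "cadence,gem" spellings, so a real match table carries more entries than shown here.
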
/linux-4.1.27/drivers/gpu/drm/qxl/

D | qxl_gem.c
      62  mutex_lock(&qdev->gem.mutex);  in qxl_gem_object_create()
      63  list_add_tail(&qbo->list, &qdev->gem.objects);  in qxl_gem_object_create()
      64  mutex_unlock(&qdev->gem.mutex);  in qxl_gem_object_create()
     110  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_gem_init()

D | qxl_object.c
      39  mutex_lock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
      41  mutex_unlock(&qdev->gem.mutex);  in qxl_ttm_bo_destroy()
     274  if (list_empty(&qdev->gem.objects))  in qxl_bo_force_delete()
     277  list_for_each_entry_safe(bo, n, &qdev->gem.objects, list) {  in qxl_bo_force_delete()
     282  mutex_lock(&qdev->gem.mutex);  in qxl_bo_force_delete()
     284  mutex_unlock(&qdev->gem.mutex);  in qxl_bo_force_delete()

D | qxl_debugfs.c
      60  list_for_each_entry(bo, &qdev->gem.objects, list) {  in qxl_debugfs_buffers_info()

D | qxl_kms.c
     130  mutex_init(&qdev->gem.mutex);  in qxl_device_init()
     134  INIT_LIST_HEAD(&qdev->gem.objects);  in qxl_device_init()

D | qxl_drv.h
     263  struct qxl_gem gem;  member

/linux-4.1.27/drivers/gpu/drm/bochs/

D | bochs.h
     103  struct drm_gem_object gem;  member
     113  static inline struct bochs_bo *gem_to_bochs_bo(struct drm_gem_object *gem)  in gem_to_bochs_bo() argument
     115  return container_of(gem, struct bochs_bo, gem);  in gem_to_bochs_bo()

D | bochs_mm.c
      79  drm_gem_object_release(&bo->gem);  in bochs_bo_ttm_destroy()
     131  return drm_vma_node_verify_access(&bochsbo->gem.vma_node, filp);  in bochs_bo_verify_access()
     363  ret = drm_gem_object_init(dev, &bochsbo->gem, size);  in bochs_bo_create()
     406  *obj = &bochsbo->gem;  in bochs_gem_create()

/linux-4.1.27/drivers/gpu/drm/radeon/

D | radeon_prime.c
      76  mutex_lock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()
      77  list_add_tail(&bo->list, &rdev->gem.objects);  in radeon_gem_prime_import_sg_table()
      78  mutex_unlock(&rdev->gem.mutex);  in radeon_gem_prime_import_sg_table()

D | radeon_gem.c
      86  mutex_lock(&rdev->gem.mutex);  in radeon_gem_object_create()
      87  list_add_tail(&robj->list, &rdev->gem.objects);  in radeon_gem_object_create()
      88  mutex_unlock(&rdev->gem.mutex);  in radeon_gem_object_create()
     128  INIT_LIST_HEAD(&rdev->gem.objects);  in radeon_gem_init()
     771  mutex_lock(&rdev->gem.mutex);  in radeon_debugfs_gem_info()
     772  list_for_each_entry(rbo, &rdev->gem.objects, list) {  in radeon_debugfs_gem_info()
     794  mutex_unlock(&rdev->gem.mutex);  in radeon_debugfs_gem_info()

D | radeon_object.c
      80  mutex_lock(&bo->rdev->gem.mutex);  in radeon_ttm_bo_destroy()
      82  mutex_unlock(&bo->rdev->gem.mutex);  in radeon_ttm_bo_destroy()
     424  if (list_empty(&rdev->gem.objects)) {  in radeon_bo_force_delete()
     428  list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {  in radeon_bo_force_delete()
     433  mutex_lock(&bo->rdev->gem.mutex);  in radeon_bo_force_delete()
     435  mutex_unlock(&bo->rdev->gem.mutex);  in radeon_bo_force_delete()

D | radeon_pm.c
     148  if (list_empty(&rdev->gem.objects))  in radeon_unmap_vram_bos()
     151  list_for_each_entry_safe(bo, n, &rdev->gem.objects, list) {  in radeon_unmap_vram_bos()

D | radeon_device.c
    1305  mutex_init(&rdev->gem.mutex);  in radeon_device_init()

D | radeon.h
    2386  struct radeon_gem gem;  member

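Note: the qxl and radeon hits above share one bookkeeping scheme: every buffer object is linked onto a per-device gem.objects list under gem.mutex when it is created and unlinked when it is finally destroyed, so debugfs dumps and forced teardown can walk all live objects. A condensed sketch of that scheme; the my_* names are assumptions, the lock and list usage mirrors the calls in the listing:

#include <linux/list.h>
#include <linux/mutex.h>

/* Per-device GEM bookkeeping, as embedded via "struct ... gem;" in qxl_drv.h / radeon.h. */
struct my_gem {
	struct mutex mutex;
	struct list_head objects;	/* every live buffer object */
};

struct my_bo {
	struct list_head list;		/* link into my_gem.objects */
	/* ... embedded GEM object, backing storage, etc. ... */
};

/* On object creation (cf. qxl_gem_object_create / radeon_gem_object_create). */
static void my_gem_track(struct my_gem *gem, struct my_bo *bo)
{
	mutex_lock(&gem->mutex);
	list_add_tail(&bo->list, &gem->objects);
	mutex_unlock(&gem->mutex);
}

/* On final destruction (cf. qxl_ttm_bo_destroy / radeon_ttm_bo_destroy). */
static void my_gem_untrack(struct my_gem *gem, struct my_bo *bo)
{
	mutex_lock(&gem->mutex);
	list_del_init(&bo->list);
	mutex_unlock(&gem->mutex);
}
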
/linux-4.1.27/drivers/gpu/drm/shmobile/

D | shmob_drm_plane.c
      49  struct drm_gem_cma_object *gem;  in shmob_drm_plane_compute_base() local
      53  gem = drm_fb_cma_get_gem_obj(fb, 0);  in shmob_drm_plane_compute_base()
      54  splane->dma[0] = gem->paddr + fb->offsets[0]  in shmob_drm_plane_compute_base()
      59  gem = drm_fb_cma_get_gem_obj(fb, 1);  in shmob_drm_plane_compute_base()
      60  splane->dma[1] = gem->paddr + fb->offsets[1]  in shmob_drm_plane_compute_base()

D | shmob_drm_crtc.c
     309  struct drm_gem_cma_object *gem;  in shmob_drm_crtc_compute_base() local
     313  gem = drm_fb_cma_get_gem_obj(fb, 0);  in shmob_drm_crtc_compute_base()
     314  scrtc->dma[0] = gem->paddr + fb->offsets[0]  in shmob_drm_crtc_compute_base()
     319  gem = drm_fb_cma_get_gem_obj(fb, 1);  in shmob_drm_crtc_compute_base()
     320  scrtc->dma[1] = gem->paddr + fb->offsets[1]  in shmob_drm_crtc_compute_base()

/linux-4.1.27/drivers/gpu/drm/rcar-du/

D | rcar_du_plane.c
      54  struct drm_gem_cma_object *gem;  in rcar_du_plane_setup_fb() local
      90  gem = drm_fb_cma_get_gem_obj(fb, 0);  in rcar_du_plane_setup_fb()
      91  rcar_du_plane_write(rgrp, index, PnDSA0R, gem->paddr + fb->offsets[0]);  in rcar_du_plane_setup_fb()
     102  gem = drm_fb_cma_get_gem_obj(fb, 1);  in rcar_du_plane_setup_fb()
     104  gem->paddr + fb->offsets[1]);  in rcar_du_plane_setup_fb()

/linux-4.1.27/arch/arm64/boot/dts/xilinx/

D | zynqmp.dtsi
     176  compatible = "cdns,gem";
     188  compatible = "cdns,gem";
     200  compatible = "cdns,gem";
     212  compatible = "cdns,gem";

/linux-4.1.27/drivers/gpu/drm/cirrus/

D | cirrus_drv.h
     167  struct drm_gem_object gem;  member
     171  #define gem_to_cirrus_bo(gobj) container_of((gobj), struct cirrus_bo, gem)

D | cirrus_ttm.c
     101  drm_gem_object_release(&bo->gem);  in cirrus_bo_ttm_destroy()
     153  return drm_vma_node_verify_access(&cirrusbo->gem.vma_node, filp);  in cirrus_bo_verify_access()
     330  ret = drm_gem_object_init(dev, &cirrusbo->gem, size);  in cirrus_bo_create()

D | cirrus_main.c
     235  *obj = &cirrusbo->gem;  in cirrus_gem_create()

/linux-4.1.27/drivers/gpu/drm/mgag200/

D | mgag200_ttm.c
     101  drm_gem_object_release(&bo->gem);  in mgag200_bo_ttm_destroy()
     153  return drm_vma_node_verify_access(&mgabo->gem.vma_node, filp);  in mgag200_bo_verify_access()
     326  ret = drm_gem_object_init(dev, &mgabo->gem, size);  in mgag200_bo_create()

D | mgag200_drv.h
     226  struct drm_gem_object gem;  member
     230  #define gem_to_mga_bo(gobj) container_of((gobj), struct mgag200_bo, gem)

D | mgag200_main.c
     287  *obj = &astbo->gem;  in mgag200_gem_create()

/linux-4.1.27/drivers/gpu/drm/ast/

D | ast_ttm.c
     101  drm_gem_object_release(&bo->gem);  in ast_bo_ttm_destroy()
     153  return drm_vma_node_verify_access(&astbo->gem.vma_node, filp);  in ast_bo_verify_access()
     326  ret = drm_gem_object_init(dev, &astbo->gem, size);  in ast_bo_create()

D | ast_drv.h
     323  struct drm_gem_object gem;  member
     327  #define gem_to_ast_bo(gobj) container_of((gobj), struct ast_bo, gem)

D | ast_main.c
     514  *obj = &astbo->gem;  in ast_gem_create()

/linux-4.1.27/arch/arm/boot/dts/

D | picoxcell-pc3x2.dtsi
      54  emac: gem@30000 {
      55  compatible = "cadence,gem";

D | picoxcell-pc3x3.dtsi
     150  emac: gem@30000 {
     151  compatible = "cadence,gem";

D | zynq-7000.dtsi
     196  compatible = "cdns,zynq-gem";
     207  compatible = "cdns,zynq-gem";

D | sama5d3_gmac.dtsi
      77  compatible = "atmel,sama5d3-gem";

D | sama5d4.dtsi
     944  compatible = "atmel,sama5d4-gem";

/linux-4.1.27/drivers/gpu/drm/tilcdc/

D | tilcdc_crtc.c
      89  struct drm_gem_cma_object *gem;  in update_scanout() local
      93  gem = drm_fb_cma_get_gem_obj(fb, 0);  in update_scanout()
      95  tilcdc_crtc->start = gem->paddr + fb->offsets[0] +  in update_scanout()

/linux-4.1.27/drivers/gpu/drm/atmel-hlcdc/

D | atmel_hlcdc_layer.c
     460  struct drm_gem_cma_object *gem;  in atmel_hlcdc_layer_update_set_fb() local
     463  gem = drm_fb_cma_get_gem_obj(fb, i);  in atmel_hlcdc_layer_update_set_fb()
     464  dscr->addr = gem->paddr + offsets[i];  in atmel_hlcdc_layer_update_set_fb()

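Note: the shmobile, rcar-du, tilcdc and atmel-hlcdc hits above all derive a scanout DMA address the same way: fetch the CMA-backed GEM object for a framebuffer plane and add that plane's byte offset (the CRTC paths additionally add an x/y offset into the plane). A sketch of that calculation; the helper name and the cpp parameter are assumptions, while the drm_fb_cma_get_gem_obj()/paddr part comes from the listing:

#include <linux/types.h>
#include <drm/drm_crtc.h>
#include <drm/drm_fb_cma_helper.h>
#include <drm/drm_gem_cma_helper.h>

/*
 * DMA address of pixel (x, y) in plane `plane` of a CMA framebuffer.
 * `cpp` is the bytes-per-pixel of that plane, assumed to be passed in.
 */
static dma_addr_t my_plane_base(struct drm_framebuffer *fb, unsigned int plane,
				unsigned int x, unsigned int y,
				unsigned int cpp)
{
	struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, plane);

	return gem->paddr + fb->offsets[plane] + y * fb->pitches[plane] + x * cpp;
}
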
/linux-4.1.27/drivers/gpu/drm/nouveau/dispnv04/

D | crtc.c
     994  struct drm_gem_object *gem;  in nv04_crtc_cursor_set() local
    1005  gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);  in nv04_crtc_cursor_set()
    1006  if (!gem)  in nv04_crtc_cursor_set()
    1008  cursor = nouveau_gem_object(gem);  in nv04_crtc_cursor_set()
    1024  drm_gem_object_unreference_unlocked(gem);  in nv04_crtc_cursor_set()

/linux-4.1.27/drivers/net/ethernet/cadence/

D | macb.h
     804  struct gem_stats gem;  member

D | macb.c
    1061  bp->hw_stats.gem.rx_overruns++;  in macb_interrupt()
    1871  u32 *p = &bp->hw_stats.gem.tx_octets_31_0;  in gem_update_stats()
    1891  struct gem_stats *hwstat = &bp->hw_stats.gem;  in gem_get_stats()

/linux-4.1.27/Documentation/

D | dma-buf-sharing.txt
     457  callback. In the specific case of a gem driver the exporter could use the
     458  shmem file already provided by gem (and set vm_pgoff = 0). Exporters can then

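Note: the dma-buf-sharing.txt lines describe the shortcut available to a GEM-based exporter: back the dma-buf mmap with the shmem file that drm_gem_object_init() already created, with vm_pgoff reset to 0 so faults resolve from the start of that file. One rough sketch of an exporter mmap callback built on that suggestion; the bo type, the priv usage and the file juggling are illustrative assumptions, not code from any driver in this listing:

#include <linux/dma-buf.h>
#include <linux/file.h>
#include <linux/fs.h>
#include <linux/mm.h>
#include <drm/drm_gem.h>

/* Hypothetical exporter buffer object embedding a shmem-backed GEM object. */
struct my_bo {
	struct drm_gem_object gem;
};

static int my_dmabuf_mmap(struct dma_buf *dma_buf, struct vm_area_struct *vma)
{
	struct my_bo *bo = dma_buf->priv;	/* set by the exporter at export time */
	struct file *filp = bo->gem.filp;	/* shmem file from drm_gem_object_init() */
	int ret;

	if (!filp || !filp->f_op->mmap)
		return -ENODEV;

	/* Map from the start of the shmem file, not from a GEM fake offset. */
	vma->vm_pgoff = 0;

	ret = filp->f_op->mmap(filp, vma);
	if (ret)
		return ret;

	/* Keep vma->vm_file pointing at the file that actually backs the mapping. */
	fput(vma->vm_file);
	vma->vm_file = get_file(filp);

	return 0;
}

Real exporters differ in the details (some validate the mapping size first, some handle the vm_file swap differently), so treat this purely as a reading aid for the quoted documentation lines.
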