shmem 728 drivers/firmware/arm_scmi/driver.c struct device_node *shmem, *np = dev->of_node;
shmem 758 drivers/firmware/arm_scmi/driver.c shmem = of_parse_phandle(np, "shmem", idx);
shmem 759 drivers/firmware/arm_scmi/driver.c ret = of_address_to_resource(shmem, 0, &res);
shmem 760 drivers/firmware/arm_scmi/driver.c of_node_put(shmem);
shmem 934 drivers/firmware/arm_scpi.c struct device_node *shmem = of_parse_phandle(np, "shmem", idx);
shmem 936 drivers/firmware/arm_scpi.c ret = of_address_to_resource(shmem, 0, &res);
shmem 937 drivers/firmware/arm_scpi.c of_node_put(shmem);
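Both firmware drivers above resolve a "shmem" phandle from the device tree to locate their mailbox shared-memory region. A minimal sketch of that shared pattern, assuming the listed of_* calls; the devm_ioremap() step is an assumption about the typical follow-up and is not part of the listing:

    #include <linux/device.h>
    #include <linux/io.h>
    #include <linux/of.h>
    #include <linux/of_address.h>

    static int resolve_shmem(struct device *dev, int idx, void __iomem **out)
    {
            struct device_node *shmem, *np = dev->of_node;
            struct resource res;
            int ret;

            /* Look up the idx-th "shmem" phandle and turn it into a resource. */
            shmem = of_parse_phandle(np, "shmem", idx);
            if (!shmem)
                    return -ENODEV;

            ret = of_address_to_resource(shmem, 0, &res);
            of_node_put(shmem);     /* of_parse_phandle() took a reference */
            if (ret)
                    return ret;

            /* Assumed follow-up: map the region for CPU access. */
            *out = devm_ioremap(dev, res.start, resource_size(&res));
            return *out ? 0 : -ENOMEM;
    }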
shmem 51 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem;
shmem 60 drivers/gpu/drm/drm_gem_shmem_helper.c obj = kzalloc(sizeof(*shmem), GFP_KERNEL);
shmem 75 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = to_drm_gem_shmem_obj(obj);
shmem 76 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_init(&shmem->pages_lock);
shmem 77 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_init(&shmem->vmap_lock);
shmem 78 drivers/gpu/drm/drm_gem_shmem_helper.c INIT_LIST_HEAD(&shmem->madv_list);
shmem 90 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem;
shmem 110 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 112 drivers/gpu/drm/drm_gem_shmem_helper.c WARN_ON(shmem->vmap_use_count);
shmem 115 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages_use_count--;
shmem 116 drivers/gpu/drm/drm_gem_shmem_helper.c drm_prime_gem_destroy(obj, shmem->sgt);
shmem 117 drivers/gpu/drm/drm_gem_shmem_helper.c kvfree(shmem->pages);
shmem 119 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->sgt) {
shmem 120 drivers/gpu/drm/drm_gem_shmem_helper.c dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl,
shmem 121 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt->nents, DMA_BIDIRECTIONAL);
shmem 122 drivers/gpu/drm/drm_gem_shmem_helper.c sg_free_table(shmem->sgt);
shmem 123 drivers/gpu/drm/drm_gem_shmem_helper.c kfree(shmem->sgt);
shmem 125 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->pages)
shmem 126 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 129 drivers/gpu/drm/drm_gem_shmem_helper.c WARN_ON(shmem->pages_use_count);
shmem 132 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_destroy(&shmem->pages_lock);
shmem 133 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_destroy(&shmem->vmap_lock);
shmem 134 drivers/gpu/drm/drm_gem_shmem_helper.c kfree(shmem);
shmem 138 drivers/gpu/drm/drm_gem_shmem_helper.c static int drm_gem_shmem_get_pages_locked(struct drm_gem_shmem_object *shmem)
shmem 140 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base;
shmem 143 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->pages_use_count++ > 0)
shmem 149 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages_use_count = 0;
shmem 153 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages = pages;
shmem 168 drivers/gpu/drm/drm_gem_shmem_helper.c int drm_gem_shmem_get_pages(struct drm_gem_shmem_object *shmem)
shmem 172 drivers/gpu/drm/drm_gem_shmem_helper.c ret = mutex_lock_interruptible(&shmem->pages_lock);
shmem 175 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_shmem_get_pages_locked(shmem);
shmem 176 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->pages_lock);
shmem 182 drivers/gpu/drm/drm_gem_shmem_helper.c static void drm_gem_shmem_put_pages_locked(struct drm_gem_shmem_object *shmem)
shmem 184 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base;
shmem 186 drivers/gpu/drm/drm_gem_shmem_helper.c if (WARN_ON_ONCE(!shmem->pages_use_count))
shmem 189 drivers/gpu/drm/drm_gem_shmem_helper.c if (--shmem->pages_use_count > 0)
shmem 192 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_put_pages(obj, shmem->pages,
shmem 193 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages_mark_dirty_on_put,
shmem 194 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages_mark_accessed_on_put);
shmem 195 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages = NULL;
shmem 204 drivers/gpu/drm/drm_gem_shmem_helper.c void drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem)
shmem 206 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_lock(&shmem->pages_lock);
shmem 207 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages_locked(shmem);
shmem 208 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->pages_lock);
shmem 224 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 226 drivers/gpu/drm/drm_gem_shmem_helper.c return drm_gem_shmem_get_pages(shmem);
shmem 239 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 241 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 245 drivers/gpu/drm/drm_gem_shmem_helper.c static void *drm_gem_shmem_vmap_locked(struct drm_gem_shmem_object *shmem)
shmem 247 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base;
shmem 250 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->vmap_use_count++ > 0)
shmem 251 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem->vaddr;
shmem 253 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_shmem_get_pages(shmem);
shmem 258 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vaddr = dma_buf_vmap(obj->import_attach->dmabuf);
shmem 260 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vaddr = vmap(shmem->pages, obj->size >> PAGE_SHIFT,
shmem 263 drivers/gpu/drm/drm_gem_shmem_helper.c if (!shmem->vaddr) {
shmem 269 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem->vaddr;
shmem 272 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 274 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vmap_use_count = 0;
shmem 291 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 295 drivers/gpu/drm/drm_gem_shmem_helper.c ret = mutex_lock_interruptible(&shmem->vmap_lock);
shmem 298 drivers/gpu/drm/drm_gem_shmem_helper.c vaddr = drm_gem_shmem_vmap_locked(shmem);
shmem 299 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->vmap_lock);
shmem 305 drivers/gpu/drm/drm_gem_shmem_helper.c static void drm_gem_shmem_vunmap_locked(struct drm_gem_shmem_object *shmem)
shmem 307 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_object *obj = &shmem->base;
shmem 309 drivers/gpu/drm/drm_gem_shmem_helper.c if (WARN_ON_ONCE(!shmem->vmap_use_count))
shmem 312 drivers/gpu/drm/drm_gem_shmem_helper.c if (--shmem->vmap_use_count > 0)
shmem 316 drivers/gpu/drm/drm_gem_shmem_helper.c dma_buf_vunmap(obj->import_attach->dmabuf, shmem->vaddr);
shmem 318 drivers/gpu/drm/drm_gem_shmem_helper.c vunmap(shmem->vaddr);
shmem 320 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->vaddr = NULL;
shmem 321 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 332 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 334 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_lock(&shmem->vmap_lock);
shmem 335 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_vunmap_locked(shmem);
shmem 336 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->vmap_lock);
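The vmap/vunmap pair above refcounts kernel mappings under vmap_lock and falls back to dma_buf_vmap()/dma_buf_vunmap() for imported buffers. A hypothetical caller (not from the listing), assuming this kernel's signatures where drm_gem_shmem_vmap() returns the address or an ERR_PTR:

    #include <drm/drm_gem_shmem_helper.h>

    /* Hypothetical helper: zero a shmem-backed BO through a kernel mapping. */
    static int my_clear_bo(struct drm_gem_object *obj)
    {
            void *vaddr = drm_gem_shmem_vmap(obj);

            if (IS_ERR(vaddr))
                    return PTR_ERR(vaddr);

            memset(vaddr, 0, obj->size);

            /* Drops vmap_use_count; the mapping is torn down at zero. */
            drm_gem_shmem_vunmap(obj, vaddr);
            return 0;
    }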
shmem 345 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem;
shmem 348 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = drm_gem_shmem_create(dev, size);
shmem 349 drivers/gpu/drm/drm_gem_shmem_helper.c if (IS_ERR(shmem))
shmem 350 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem;
shmem 356 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_handle_create(file_priv, &shmem->base, handle);
shmem 358 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_object_put_unlocked(&shmem->base);
shmem 362 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem;
shmem 371 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 373 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_lock(&shmem->pages_lock);
shmem 375 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->madv >= 0)
shmem 376 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->madv = madv;
shmem 378 drivers/gpu/drm/drm_gem_shmem_helper.c madv = shmem->madv;
shmem 380 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->pages_lock);
shmem 389 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 391 drivers/gpu/drm/drm_gem_shmem_helper.c WARN_ON(!drm_gem_shmem_is_purgeable(shmem));
shmem 393 drivers/gpu/drm/drm_gem_shmem_helper.c dma_unmap_sg(obj->dev->dev, shmem->sgt->sgl,
shmem 394 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt->nents, DMA_BIDIRECTIONAL);
shmem 395 drivers/gpu/drm/drm_gem_shmem_helper.c sg_free_table(shmem->sgt);
shmem 396 drivers/gpu/drm/drm_gem_shmem_helper.c kfree(shmem->sgt);
shmem 397 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt = NULL;
shmem 399 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages_locked(shmem);
shmem 401 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->madv = -1;
shmem 420 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 422 drivers/gpu/drm/drm_gem_shmem_helper.c if (!mutex_trylock(&shmem->pages_lock))
shmem 425 drivers/gpu/drm/drm_gem_shmem_helper.c mutex_unlock(&shmem->pages_lock);
shmem 452 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem;
shmem 465 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = drm_gem_shmem_create_with_handle(file, dev, args->size, &args->handle);
shmem 467 drivers/gpu/drm/drm_gem_shmem_helper.c return PTR_ERR_OR_ZERO(shmem);
shmem 475 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 479 drivers/gpu/drm/drm_gem_shmem_helper.c if (vmf->pgoff >= num_pages || WARN_ON_ONCE(!shmem->pages))
shmem 482 drivers/gpu/drm/drm_gem_shmem_helper.c page = shmem->pages[vmf->pgoff];
shmem 490 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 493 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_shmem_get_pages(shmem);
shmem 502 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 504 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 533 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem;
shmem 540 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = to_drm_gem_shmem_obj(vma->vm_private_data);
shmem 542 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_shmem_get_pages(shmem);
shmem 553 drivers/gpu/drm/drm_gem_shmem_helper.c vma->vm_pgoff -= drm_vma_node_start(&shmem->base.vma_node);
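The madvise/purge entries above record userspace's willneed/dontneed hint under pages_lock and drop the backing store once an object is purgeable. A sketch of an ioctl-side caller, loosely in the panfrost style; struct my_device and its shrinker_lock/shrinker_list members are illustrative stand-ins, not real API:

    #include <drm/drm_gem_shmem_helper.h>

    struct my_device {                      /* hypothetical driver device */
            struct mutex shrinker_lock;
            struct list_head shrinker_list;
    };

    static int my_madvise(struct my_device *mdev, struct drm_gem_object *obj,
                          int madv)
    {
            struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
            int retained;

            /* Records the hint under pages_lock; nonzero while the backing
             * pages have not yet been purged. */
            retained = drm_gem_shmem_madvise(obj, madv);

            if (retained && madv > 0) {
                    /* madv > 0 means "don't need": queue for the shrinker,
                     * which walks madv_list as in the panfrost entries below. */
                    mutex_lock(&mdev->shrinker_lock);
                    list_add_tail(&shmem->madv_list, &mdev->shrinker_list);
                    mutex_unlock(&mdev->shrinker_lock);
            }

            return retained;
    }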
shmem 568 drivers/gpu/drm/drm_gem_shmem_helper.c const struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 570 drivers/gpu/drm/drm_gem_shmem_helper.c drm_printf_indent(p, indent, "pages_use_count=%u\n", shmem->pages_use_count);
shmem 571 drivers/gpu/drm/drm_gem_shmem_helper.c drm_printf_indent(p, indent, "vmap_use_count=%u\n", shmem->vmap_use_count);
shmem 572 drivers/gpu/drm/drm_gem_shmem_helper.c drm_printf_indent(p, indent, "vaddr=%p\n", shmem->vaddr);
shmem 589 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 591 drivers/gpu/drm/drm_gem_shmem_helper.c return drm_prime_pages_to_sg(shmem->pages, obj->size >> PAGE_SHIFT);
shmem 610 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 613 drivers/gpu/drm/drm_gem_shmem_helper.c if (shmem->sgt)
shmem 614 drivers/gpu/drm/drm_gem_shmem_helper.c return shmem->sgt;
shmem 618 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_gem_shmem_get_pages(shmem);
shmem 622 drivers/gpu/drm/drm_gem_shmem_helper.c sgt = drm_gem_shmem_get_sg_table(&shmem->base);
shmem 630 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt = sgt;
shmem 635 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_shmem_put_pages(shmem);
shmem 662 drivers/gpu/drm/drm_gem_shmem_helper.c struct drm_gem_shmem_object *shmem;
shmem 665 drivers/gpu/drm/drm_gem_shmem_helper.c shmem = drm_gem_shmem_create(dev, size);
shmem 666 drivers/gpu/drm/drm_gem_shmem_helper.c if (IS_ERR(shmem))
shmem 667 drivers/gpu/drm/drm_gem_shmem_helper.c return ERR_CAST(shmem);
shmem 669 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages = kvmalloc_array(npages, sizeof(struct page *), GFP_KERNEL);
shmem 670 drivers/gpu/drm/drm_gem_shmem_helper.c if (!shmem->pages) {
shmem 675 drivers/gpu/drm/drm_gem_shmem_helper.c ret = drm_prime_sg_to_page_addr_arrays(sgt, shmem->pages, NULL, npages);
shmem 679 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->sgt = sgt;
shmem 680 drivers/gpu/drm/drm_gem_shmem_helper.c shmem->pages_use_count = 1; /* Permanently pinned from our point of view */
shmem 684 drivers/gpu/drm/drm_gem_shmem_helper.c return &shmem->base;
shmem 687 drivers/gpu/drm/drm_gem_shmem_helper.c kvfree(shmem->pages);
shmem 689 drivers/gpu/drm/drm_gem_shmem_helper.c drm_gem_object_put_unlocked(&shmem->base);
shmem 243 drivers/gpu/drm/panfrost/panfrost_gem.c struct drm_gem_shmem_object *shmem;
shmem 250 drivers/gpu/drm/panfrost/panfrost_gem.c shmem = drm_gem_shmem_create(dev, size);
shmem 251 drivers/gpu/drm/panfrost/panfrost_gem.c if (IS_ERR(shmem))
shmem 252 drivers/gpu/drm/panfrost/panfrost_gem.c return ERR_CAST(shmem);
shmem 254 drivers/gpu/drm/panfrost/panfrost_gem.c bo = to_panfrost_bo(&shmem->base);
shmem 262 drivers/gpu/drm/panfrost/panfrost_gem.c ret = drm_gem_handle_create(file_priv, &shmem->base, handle);
shmem 264 drivers/gpu/drm/panfrost/panfrost_gem.c drm_gem_object_put_unlocked(&shmem->base);
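drm_gem_shmem_get_pages_sgt(), listed above, pins the pages and builds, dma-maps, and caches the object's scatter/gather table on first use. A hypothetical device-mapping path built on it; the loop body only logs each segment where a real driver would program its GPU MMU:

    #include <drm/drm_gem_shmem_helper.h>
    #include <linux/scatterlist.h>

    static int my_map_bo_to_gpu(struct drm_gem_object *obj)
    {
            struct sg_table *sgt = drm_gem_shmem_get_pages_sgt(obj);
            struct scatterlist *sg;
            unsigned int i;

            if (IS_ERR(sgt))
                    return PTR_ERR(sgt);

            /* Walk the dma-mapped segments; a real driver would write each
             * sg_dma_address()/sg_dma_len() pair into its page tables. */
            for_each_sg(sgt->sgl, sg, sgt->nents, i) {
                    dma_addr_t daddr = sg_dma_address(sg);

                    pr_debug("segment %u: %pad, %u bytes\n", i, &daddr,
                             sg_dma_len(sg));
            }

            return 0;
    }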
shmem 23 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c struct drm_gem_shmem_object *shmem;
shmem 29 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c list_for_each_entry(shmem, &pfdev->shrinker_list, madv_list) {
shmem 30 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c if (drm_gem_shmem_is_purgeable(shmem))
shmem 31 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c count += shmem->base.size >> PAGE_SHIFT;
shmem 41 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c struct drm_gem_shmem_object *shmem = to_drm_gem_shmem_obj(obj);
shmem 47 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c if (!mutex_trylock(&shmem->pages_lock))
shmem 53 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c mutex_unlock(&shmem->pages_lock);
shmem 62 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c struct drm_gem_shmem_object *shmem, *tmp;
shmem 68 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c list_for_each_entry_safe(shmem, tmp, &pfdev->shrinker_list, madv_list) {
shmem 71 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c if (drm_gem_shmem_is_purgeable(shmem) &&
shmem 72 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c panfrost_gem_purge(&shmem->base)) {
shmem 73 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c freed += shmem->base.size >> PAGE_SHIFT;
shmem 74 drivers/gpu/drm/panfrost/panfrost_gem_shrinker.c list_del_init(&shmem->madv_list);
shmem 126 drivers/net/arcnet/arc-rimi.c unsigned long first_mirror, last_mirror, shmem;
shmem 146 drivers/net/arcnet/arc-rimi.c shmem = dev->mem_start;
shmem 159 drivers/net/arcnet/arc-rimi.c check_mirror(shmem - MIRROR_SIZE, MIRROR_SIZE) == 0 &&
shmem 160 drivers/net/arcnet/arc-rimi.c check_mirror(shmem - 2 * MIRROR_SIZE, MIRROR_SIZE) == 1)
shmem 163 drivers/net/arcnet/arc-rimi.c first_mirror = shmem - mirror_size;
shmem 168 drivers/net/arcnet/arc-rimi.c last_mirror = shmem + mirror_size;
shmem 194 drivers/net/arcnet/arc-rimi.c release_mem_region(shmem, MIRROR_SIZE);
shmem 58 drivers/net/arcnet/com90xx.c static int com90xx_found(int ioaddr, int airq, u_long shmem, void __iomem *);
shmem 88 drivers/net/arcnet/com90xx.c static int shmem;
shmem 93 drivers/net/arcnet/com90xx.c module_param(shmem, int, 0);
shmem 107 drivers/net/arcnet/com90xx.c if (!io && !irq && !shmem && !*device && com90xx_skip_probe)
shmem 131 drivers/net/arcnet/com90xx.c if (shmem)
shmem 132 drivers/net/arcnet/com90xx.c shmems[numshmems++] = shmem;
shmem 460 drivers/net/arcnet/com90xx.c static int __init com90xx_found(int ioaddr, int airq, u_long shmem,
shmem 473 drivers/net/arcnet/com90xx.c release_mem_region(shmem, MIRROR_SIZE);
shmem 485 drivers/net/arcnet/com90xx.c check_mirror(shmem - MIRROR_SIZE, MIRROR_SIZE) == 0 &&
shmem 486 drivers/net/arcnet/com90xx.c check_mirror(shmem - 2 * MIRROR_SIZE, MIRROR_SIZE) == 1)
shmem 489 drivers/net/arcnet/com90xx.c first_mirror = shmem - mirror_size;
shmem 494 drivers/net/arcnet/com90xx.c last_mirror = shmem + mirror_size;
shmem 503 drivers/net/arcnet/com90xx.c release_mem_region(shmem, MIRROR_SIZE);
shmem 698 drivers/net/arcnet/com90xx.c shmem = ints[3];
shmem 746 drivers/net/ethernet/8390/mac8390.c long shmem = (start_page - WD_START_PG)<<8;
shmem 748 drivers/net/ethernet/8390/mac8390.c memcpy_toio((void __iomem *)dev->mem_start + shmem, buf, count);
shmem 788 drivers/net/ethernet/8390/mac8390.c long shmem = (start_page - WD_START_PG)<<8;
shmem 790 drivers/net/ethernet/8390/mac8390.c dayna_memcpy_tocard(dev, shmem, buf, count);
shmem 827 drivers/net/ethernet/8390/mac8390.c long shmem = (start_page - WD_START_PG)<<8;
shmem 829 drivers/net/ethernet/8390/mac8390.c word_memcpy_tocard(dev->mem_start + shmem, buf, count);
shmem 1393 drivers/net/ethernet/8390/pcnet_cs.c void __iomem *shmem = ei_status.mem + (start_page << 8);
shmem 1394 drivers/net/ethernet/8390/pcnet_cs.c shmem -= ei_status.tx_start_page << 8;
shmem 1395 drivers/net/ethernet/8390/pcnet_cs.c copyout(shmem, buf, count);
shmem 486 drivers/net/ethernet/8390/smc-ultra.c void __iomem *shmem = ei_status.mem + ((start_page - START_PG)<<8);
shmem 491 drivers/net/ethernet/8390/smc-ultra.c memcpy_toio(shmem, buf, count);
shmem 465 drivers/net/ethernet/8390/wd.c void __iomem *shmem = ei_status.mem + ((start_page - WD_START_PG)<<8);
shmem 471 drivers/net/ethernet/8390/wd.c memcpy_toio(shmem, buf, count);
shmem 474 drivers/net/ethernet/8390/wd.c memcpy_toio(shmem, buf, count);
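The 8390-family entries above all share one idea: the NIC's packet ring is a memory-mapped window, so transmit data is copied with memcpy_toio() at an offset derived from the 256-byte ring page number. A condensed sketch of that block_output pattern; START_PG stands in for the driver-specific first ring page (WD_START_PG, START_PG, ... in the listing), and the include path mirrors how the listed drivers pull in ei_status:

    #include <linux/netdevice.h>
    #include "8390.h"               /* ei_status, as in the drivers above */

    #define START_PG 0x00           /* illustrative value */

    static void my_block_output(struct net_device *dev, int count,
                                const unsigned char *buf, int start_page)
    {
            /* Each 8390 ring page is 256 bytes, hence the << 8. */
            void __iomem *shmem = ei_status.mem +
                                  ((start_page - START_PG) << 8);

            memcpy_toio(shmem, buf, count);
    }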
shmem 543 drivers/net/ethernet/alacritech/slic.h struct slic_shmem shmem;
shmem 628 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 677 drivers/net/ethernet/alacritech/slicoss.c return slic_new_upr(sdev, SLIC_UPR_LSTAT, sdev->shmem.link_paddr);
shmem 714 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 737 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 1228 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 1248 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 1257 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 1600 drivers/net/ethernet/alacritech/slicoss.c struct slic_shmem *sm = &sdev->shmem;
shmem 9577 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c u32 shmem;
shmem 9587 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c shmem = REG_RD(bp, MISC_REG_SHARED_MEM_ADDR);
shmem 9592 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c if (shmem > 0)
shmem 9593 drivers/net/ethernet/broadcom/bnx2x/bnx2x_main.c REG_WR(bp, shmem + validity_offset, 0);
shmem 32 fs/proc/task_mmu.c unsigned long text, lib, swap, anon, file, shmem;
shmem 37 fs/proc/task_mmu.c shmem = get_mm_counter(mm, MM_SHMEMPAGES);
shmem 49 fs/proc/task_mmu.c hiwater_rss = total_rss = anon + file + shmem;
shmem 67 fs/proc/task_mmu.c SEQ_PUT_DEC(" kB\nRssShmem:\t", shmem);
shmem 133 include/drm/drm_gem_shmem_helper.h int drm_gem_shmem_get_pages(struct drm_gem_shmem_object *shmem);
shmem 134 include/drm/drm_gem_shmem_helper.h void drm_gem_shmem_put_pages(struct drm_gem_shmem_object *shmem);
shmem 142 include/drm/drm_gem_shmem_helper.h static inline bool drm_gem_shmem_is_purgeable(struct drm_gem_shmem_object *shmem)
shmem 144 include/drm/drm_gem_shmem_helper.h return (shmem->madv > 0) &&
shmem 145 include/drm/drm_gem_shmem_helper.h !shmem->vmap_use_count && shmem->sgt &&
shmem 146 include/drm/drm_gem_shmem_helper.h !shmem->base.dma_buf && !shmem->base.import_attach;
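The header's drm_gem_shmem_is_purgeable() ties the DRM entries together: an object may be purged only if userspace marked it dontneed (madv > 0), it has no live kernel mapping, its pages are already wired into an sg_table, and it is neither exported nor imported as a dma-buf. A shrinker-style caller mirroring the panfrost scan loop above, assuming the trylock purge wrapper (drm_gem_shmem_helper.c:420 in this listing) is exported alongside these helpers:

    #include <drm/drm_gem_shmem_helper.h>

    /* Sketch: try to reclaim one object; drm_gem_shmem_purge() takes
     * pages_lock via mutex_trylock() and returns false on contention. */
    static unsigned long my_try_purge(struct drm_gem_shmem_object *shmem)
    {
            if (!drm_gem_shmem_is_purgeable(shmem))
                    return 0;

            if (!drm_gem_shmem_purge(&shmem->base))
                    return 0;

            return shmem->base.size >> PAGE_SHIFT;  /* pages reclaimed */
    }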