get_node 355 drivers/gpu/drm/amd/amdgpu/amdgpu_gtt_mgr.c .get_node = amdgpu_gtt_mgr_new,
get_node 466 drivers/gpu/drm/amd/amdgpu/amdgpu_vram_mgr.c .get_node = amdgpu_vram_mgr_new,
get_node 91 drivers/gpu/drm/nouveau/nouveau_ttm.c .get_node = nouveau_vram_manager_new,
get_node 117 drivers/gpu/drm/nouveau/nouveau_ttm.c .get_node = nouveau_gart_manager_new,
get_node 156 drivers/gpu/drm/nouveau/nouveau_ttm.c .get_node = nv04_gart_manager_new,
get_node 976 drivers/gpu/drm/ttm/ttm_bo.c ret = (*man->func->get_node)(man, bo, place, mem);
get_node 1125 drivers/gpu/drm/ttm/ttm_bo.c ret = (*man->func->get_node)(man, bo, place, mem);
get_node 152 drivers/gpu/drm/ttm/ttm_bo_manager.c .get_node = ttm_bo_man_get_node,
get_node 110 drivers/gpu/drm/virtio/virtgpu_ttm.c .get_node = ttm_bo_man_get_node,
get_node 151 drivers/gpu/drm/vmwgfx/vmwgfx_gmrid_manager.c .get_node = vmw_gmrid_man_get_node,
get_node 115 drivers/md/dm-table.c return get_node(t, l, n)[KEYS_PER_NODE - 1];
get_node 128 drivers/md/dm-table.c node = get_node(t, l, n);
get_node 1373 drivers/md/dm-table.c node = get_node(t, l, n);
get_node 396 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data);
get_node 428 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data);
get_node 473 drivers/sbus/char/openprom.c dp = get_node(op.op_nodeid, data);
get_node 106 include/drm/ttm/ttm_bo_driver.h int (*get_node)(struct ttm_mem_type_manager *man,
get_node 244 mm/slab.c list_splice(&get_node(cachep, nodeid)->slab, listp); \
get_node 559 mm/slab.c n = get_node(cachep, page_node);
get_node 688 mm/slab.c struct kmem_cache_node *n = get_node(cachep, node);
get_node 760 mm/slab.c n = get_node(cachep, node);
get_node 774 mm/slab.c n = get_node(cachep, page_node);
get_node 816 mm/slab.c n = get_node(cachep, node);
get_node 899 mm/slab.c n = get_node(cachep, node);
get_node 953 mm/slab.c n = get_node(cachep, node);
get_node 999 mm/slab.c n = get_node(cachep, node);
get_node 1103 mm/slab.c n = get_node(cachep, node);
get_node 2099 mm/slab.c assert_spin_locked(&get_node(cachep, numa_mem_id())->list_lock);
get_node 2107 mm/slab.c assert_spin_locked(&get_node(cachep, node)->list_lock);
get_node 2146 mm/slab.c n = get_node(cachep, node);
get_node 2604 mm/slab.c n = get_node(cachep, page_node);
get_node 2658 mm/slab.c n = get_node(cachep, page_to_nid(page));
get_node 2927 mm/slab.c n = get_node(cachep, node);
get_node 3130 mm/slab.c get_node(cache, nid) &&
get_node 3131 mm/slab.c get_node(cache, nid)->free_objects) {
get_node 3179 mm/slab.c n = get_node(cachep, nodeid);
get_node 3236 mm/slab.c if (unlikely(!get_node(cachep, nodeid))) {
get_node 3331 mm/slab.c struct kmem_cache_node *n = get_node(cachep, node);
get_node 3381 mm/slab.c n = get_node(cachep, node);
get_node 3784 mm/slab.c n = get_node(cachep, node);
get_node 3832 mm/slab.c n = get_node(cachep, node);
get_node 3998 mm/slab.c n = get_node(searchp, node);
get_node 639 mm/slab.h if ((__n = get_node(__s, __node)))
get_node 1033 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
get_node 1045 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
get_node 1060 mm/slub.c struct kmem_cache_node *n = get_node(s, node);
get_node 1173 mm/slub.c struct kmem_cache_node *n = get_node(s, page_to_nid(page));
get_node 1923 mm/slub.c n = get_node(s, zone_to_nid(zone));
get_node 1957 mm/slub.c object = get_partial_node(s, get_node(s, searchnode), c, flags);
get_node 2040 mm/slub.c struct kmem_cache_node *n = get_node(s, page_to_nid(page));
get_node 2194 mm/slub.c n2 = get_node(s, page_to_nid(page));
get_node 2879 mm/slub.c n = get_node(s, page_to_nid(page));
get_node 4112 mm/slub.c n = get_node(s, offline_node);