cpu_cache  123 arch/arm/include/asm/cacheflush.h   extern struct cpu_cache_fns cpu_cache;
cpu_cache  125 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_icache_all      cpu_cache.flush_icache_all
cpu_cache  126 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_kern_all        cpu_cache.flush_kern_all
cpu_cache  127 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_kern_louis      cpu_cache.flush_kern_louis
cpu_cache  128 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_user_all        cpu_cache.flush_user_all
cpu_cache  129 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_user_range      cpu_cache.flush_user_range
cpu_cache  130 arch/arm/include/asm/cacheflush.h   #define __cpuc_coherent_kern_range   cpu_cache.coherent_kern_range
cpu_cache  131 arch/arm/include/asm/cacheflush.h   #define __cpuc_coherent_user_range   cpu_cache.coherent_user_range
cpu_cache  132 arch/arm/include/asm/cacheflush.h   #define __cpuc_flush_dcache_area     cpu_cache.flush_kern_dcache_area
cpu_cache  140 arch/arm/include/asm/cacheflush.h   #define dmac_flush_range             cpu_cache.dma_flush_range
cpu_cache  127 arch/arm/kernel/setup.c             struct cpu_cache_fns cpu_cache __ro_after_init;
cpu_cache  705 arch/arm/kernel/setup.c             cpu_cache = *list->cache;
cpu_cache   28 arch/arm/mm/dma.h                   #define dmac_map_area                cpu_cache.dma_map_area
cpu_cache   29 arch/arm/mm/dma.h                   #define dmac_unmap_area              cpu_cache.dma_unmap_area
cpu_cache   31 arch/arm/mm/proc-syms.c             EXPORT_SYMBOL(cpu_cache);
cpu_cache  453 arch/powerpc/kernel/cacheinfo.c     struct cache *cpu_cache = NULL;
cpu_cache  462 arch/powerpc/kernel/cacheinfo.c     cpu_cache = cache_lookup_or_instantiate(cpu_node, 1);
cpu_cache  463 arch/powerpc/kernel/cacheinfo.c     if (!cpu_cache)
cpu_cache  466 arch/powerpc/kernel/cacheinfo.c     do_subsidiary_caches(cpu_cache);
cpu_cache  468 arch/powerpc/kernel/cacheinfo.c     cache_cpu_set(cpu_cache, cpu_id);
cpu_cache  472 arch/powerpc/kernel/cacheinfo.c     return cpu_cache;
cpu_cache   12 include/linux/slab_def.h            struct array_cache __percpu *cpu_cache;
cpu_cache  394 mm/slab.c                           return this_cpu_ptr(cachep->cpu_cache);
cpu_cache  963 mm/slab.c                           nc = per_cpu_ptr(cachep->cpu_cache, cpu);
cpu_cache 1726 mm/slab.c                           struct array_cache __percpu *cpu_cache;
cpu_cache 1729 mm/slab.c                           cpu_cache = __alloc_percpu(size, sizeof(void *));
cpu_cache 1731 mm/slab.c                           if (!cpu_cache)
cpu_cache 1735 mm/slab.c                           init_arraycache(per_cpu_ptr(cpu_cache, cpu),
cpu_cache 1739 mm/slab.c                           return cpu_cache;
cpu_cache 1747 mm/slab.c                           cachep->cpu_cache = alloc_kmem_cache_cpus(cachep, 1, 1);
cpu_cache 1748 mm/slab.c                           if (!cachep->cpu_cache)
cpu_cache 2268 mm/slab.c                           free_percpu(cachep->cpu_cache);
cpu_cache 3801 mm/slab.c                           struct array_cache __percpu *cpu_cache, *prev;
cpu_cache 3804 mm/slab.c                           cpu_cache = alloc_kmem_cache_cpus(cachep, limit, batchcount);
cpu_cache 3805 mm/slab.c                           if (!cpu_cache)
cpu_cache 3808 mm/slab.c                           prev = cachep->cpu_cache;
cpu_cache 3809 mm/slab.c                           cachep->cpu_cache = cpu_cache;
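The arch/arm entries above form one pattern: a global struct of cache-maintenance function pointers (cpu_cache, marked __ro_after_init) is filled in once during setup_processor() from the matched processor descriptor (cpu_cache = *list->cache;), and the __cpuc_* / dmac_* macros let callers dispatch through it without naming the struct. Below is a minimal userspace sketch of that dispatch pattern, not kernel code: the "v7" names and the printing bodies are illustrative stand-ins for the real per-CPU-type cache routines.

	/* Userspace sketch of the cpu_cache dispatch table; illustrative only. */
	#include <stdio.h>
	#include <string.h>

	struct cpu_cache_fns {
		void (*flush_icache_all)(void);
		void (*flush_kern_all)(void);
	};

	/* Written once at init, read-only afterwards (__ro_after_init in the kernel). */
	static struct cpu_cache_fns cpu_cache;

	/* Callers use the macros, never the struct directly. */
	#define __cpuc_flush_icache_all	cpu_cache.flush_icache_all
	#define __cpuc_flush_kern_all	cpu_cache.flush_kern_all

	static void v7_flush_icache_all(void) { puts("v7: flush I-cache"); }
	static void v7_flush_kern_all(void)   { puts("v7: flush D-cache"); }

	static const struct cpu_cache_fns v7_cache_fns = {
		.flush_icache_all = v7_flush_icache_all,
		.flush_kern_all   = v7_flush_kern_all,
	};

	/* Stands in for the proc-list match in arch/arm/kernel/setup.c. */
	static void setup_processor(const char *cpu_name)
	{
		if (strcmp(cpu_name, "v7") == 0)
			cpu_cache = v7_cache_fns;	/* cpu_cache = *list->cache; */
	}

	int main(void)
	{
		setup_processor("v7");
		__cpuc_flush_icache_all();	/* dispatches through cpu_cache */
		__cpuc_flush_kern_all();
		return 0;
	}

Copying the whole struct by value at init (rather than keeping a pointer to the descriptor) is what lets the kernel mark cpu_cache read-only after boot while keeping each indirect call a single load.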
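The mm/slab.c entries show a different pattern: each kmem_cache owns a __percpu array_cache, allocated by alloc_kmem_cache_cpus() via __alloc_percpu(), reached on the fast path with this_cpu_ptr(), swapped for a new one on tuning (the prev/cpu_cache exchange), and released with free_percpu(). The sketch below imitates that lifecycle in plain userspace C; the percpu machinery is faked with an array indexed by a hypothetical CPU id, and NR_CPUS and the field layout are illustrative, not the kernel's values.

	/* Userspace sketch of the per-kmem_cache percpu array cache; illustrative only. */
	#include <stdio.h>
	#include <stdlib.h>

	#define NR_CPUS 4

	struct array_cache {
		unsigned int avail;	/* objects currently cached */
		unsigned int limit;	/* capacity before spilling to shared lists */
		void *entry[];		/* cached object pointers */
	};

	struct kmem_cache {
		struct array_cache *cpu_cache[NR_CPUS];	/* stand-in for __percpu */
	};

	/* Stand-in for per_cpu_ptr(cachep->cpu_cache, cpu). */
	static struct array_cache *cpu_cache_get(struct kmem_cache *cachep, int cpu)
	{
		return cachep->cpu_cache[cpu];
	}

	/* Mirrors alloc_kmem_cache_cpus(): one array_cache per possible CPU. */
	static int alloc_kmem_cache_cpus(struct kmem_cache *cachep, unsigned int limit)
	{
		for (int cpu = 0; cpu < NR_CPUS; cpu++) {
			struct array_cache *ac;

			ac = calloc(1, sizeof(*ac) + limit * sizeof(void *));
			if (!ac)
				return -1;
			ac->limit = limit;
			cachep->cpu_cache[cpu] = ac;
		}
		return 0;
	}

	int main(void)
	{
		struct kmem_cache cache = { 0 };

		if (alloc_kmem_cache_cpus(&cache, 16))
			return 1;
		printf("cpu0 cache limit = %u\n", cpu_cache_get(&cache, 0)->limit);
		for (int cpu = 0; cpu < NR_CPUS; cpu++)	/* free_percpu() analogue */
			free(cache.cpu_cache[cpu]);
		return 0;
	}

In the real allocator the per-CPU cache is what makes the common alloc/free path lock-free: each CPU pushes and pops object pointers from its own array_cache and only touches shared node lists when the array empties or overflows.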