Searched refs:cpu_cache (Results 1 – 6 of 6) sorted by relevance
126 extern struct cpu_cache_fns cpu_cache;
128 #define __cpuc_flush_icache_all	cpu_cache.flush_icache_all
129 #define __cpuc_flush_kern_all		cpu_cache.flush_kern_all
130 #define __cpuc_flush_kern_louis	cpu_cache.flush_kern_louis
131 #define __cpuc_flush_user_all		cpu_cache.flush_user_all
132 #define __cpuc_flush_user_range	cpu_cache.flush_user_range
133 #define __cpuc_coherent_kern_range	cpu_cache.coherent_kern_range
134 #define __cpuc_coherent_user_range	cpu_cache.coherent_user_range
135 #define __cpuc_flush_dcache_area	cpu_cache.flush_kern_dcache_area
143 #define dmac_map_area			cpu_cache.dma_map_area
[all …]
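These hits show the ARM multi-CPU build routing the __cpuc_* cache-maintenance calls through a global struct cpu_cache_fns of function pointers, so the implementation matching the detected CPU can be plugged in once at boot. Below is a minimal, self-contained sketch of that indirection pattern; the struct layout beyond the field names visible above, the v7_* routines, and the printf bodies are illustrative stand-ins, not the kernel's actual definitions.

#include <stdio.h>

/* Sketch of a cpu_cache_fns-style ops table: one function pointer per
 * cache-maintenance operation. Field names mirror the macros above;
 * everything else is illustrative. */
struct cpu_cache_fns {
	void (*flush_icache_all)(void);
	void (*flush_kern_all)(void);
	void (*coherent_kern_range)(unsigned long start, unsigned long end);
};

/* Hypothetical per-CPU-type implementations (stand-ins for the real
 * per-architecture routines). */
static void v7_flush_icache_all(void) { puts("v7: flush I-cache"); }
static void v7_flush_kern_all(void) { puts("v7: flush entire D-cache"); }
static void v7_coherent_kern_range(unsigned long s, unsigned long e)
{
	printf("v7: make %#lx-%#lx coherent\n", s, e);
}

/* Global ops table, filled in once at "boot" for the detected CPU. */
static struct cpu_cache_fns cpu_cache;

/* The __cpuc_* macros simply forward to the table. */
#define __cpuc_flush_icache_all()		cpu_cache.flush_icache_all()
#define __cpuc_coherent_kern_range(s, e)	cpu_cache.coherent_kern_range(s, e)

int main(void)
{
	/* setup_processor()-style copy of the matching CPU's ops
	 * (compare the "cpu_cache = *list->cache" hit further down). */
	static const struct cpu_cache_fns v7_cache_fns = {
		.flush_icache_all	= v7_flush_icache_all,
		.flush_kern_all		= v7_flush_kern_all,
		.coherent_kern_range	= v7_coherent_kern_range,
	};
	cpu_cache = v7_cache_fns;

	/* Callers use the macros and never see which CPU they run on. */
	__cpuc_flush_icache_all();
	__cpuc_coherent_kern_range(0x1000, 0x2000);
	return 0;
}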
456 struct cache *cpu_cache = NULL; in cache_chain_instantiate() local
465 cpu_cache = cache_lookup_or_instantiate(cpu_node, 1); in cache_chain_instantiate()
466 if (!cpu_cache) in cache_chain_instantiate()
469 do_subsidiary_caches(cpu_cache); in cache_chain_instantiate()
471 cache_cpu_set(cpu_cache, cpu_id); in cache_chain_instantiate()
475 return cpu_cache; in cache_chain_instantiate()
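These hits are the powerpc cacheinfo path: cache_chain_instantiate() looks up or creates the L1 cache object for a CPU's firmware node, instantiates the subsidiary higher-level caches behind it, and records that this CPU uses the chain. A hedged sketch of that shape, with invented types and a plain next_level pointer standing in for the firmware node links (the real code keys caches off device-tree nodes and shares cache objects between CPUs):

#include <stdio.h>
#include <stdlib.h>

struct cache {
	int level;
	struct cache *next_level;	/* subsidiary (larger, slower) cache */
	unsigned long cpu_mask;		/* CPUs that use this cache */
};

/* Illustrative: always instantiates; the real lookup may reuse a cache
 * already created for a sibling CPU. */
static struct cache *cache_lookup_or_instantiate(int cpu_id, int level)
{
	struct cache *c = calloc(1, sizeof(*c));
	if (c)
		c->level = level;
	return c;
}

/* Walk the "next level" links, creating each subsidiary cache in turn. */
static void do_subsidiary_caches(struct cache *cpu_cache)
{
	struct cache *c = cpu_cache;
	int level = cpu_cache->level;

	while (level < 3) {		/* pretend the chain stops at L3 */
		c->next_level = cache_lookup_or_instantiate(-1, ++level);
		c = c->next_level;
	}
}

/* Mark every cache in the chain as used by this CPU. */
static void cache_cpu_set(struct cache *c, int cpu_id)
{
	for (; c; c = c->next_level)
		c->cpu_mask |= 1UL << cpu_id;
}

static struct cache *cache_chain_instantiate(int cpu_id)
{
	struct cache *cpu_cache = cache_lookup_or_instantiate(cpu_id, 1);
	if (!cpu_cache)
		return NULL;
	do_subsidiary_caches(cpu_cache);
	cache_cpu_set(cpu_cache, cpu_id);
	return cpu_cache;
}

int main(void)
{
	struct cache *chain = cache_chain_instantiate(0);

	for (struct cache *c = chain; c; c = c->next_level)
		printf("L%d cache, cpu_mask=%#lx\n", c->level, c->cpu_mask);
	return 0;
}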
34 EXPORT_SYMBOL(cpu_cache);
11 struct array_cache __percpu *cpu_cache; member
474 return this_cpu_ptr(cachep->cpu_cache); in cpu_cache_get()
1118 nc = per_cpu_ptr(cachep->cpu_cache, cpu); in cpuup_canceled()
2006 struct array_cache __percpu *cpu_cache; in alloc_kmem_cache_cpus() local
2009 cpu_cache = __alloc_percpu(size, sizeof(void *)); in alloc_kmem_cache_cpus()
2011 if (!cpu_cache) in alloc_kmem_cache_cpus()
2015 init_arraycache(per_cpu_ptr(cpu_cache, cpu), in alloc_kmem_cache_cpus()
2019 return cpu_cache; in alloc_kmem_cache_cpus()
2027 cachep->cpu_cache = alloc_kmem_cache_cpus(cachep, 1, 1); in setup_cpu_cache()
2028 if (!cachep->cpu_cache) in setup_cpu_cache()
2433 free_percpu(cachep->cpu_cache); in __kmem_cache_shutdown()
[all …]
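In the slab allocator hits above, cpu_cache is a per-CPU array_cache: each CPU keeps a small stack of recently freed objects so the hot allocation path avoids shared locks. The kernel allocates it with __alloc_percpu() and reaches it through this_cpu_ptr()/per_cpu_ptr(); the sketch below imitates only the idea in plain userspace C (one cache per "CPU", selected by an explicit index), so the percpu machinery, sizing, and locking of the real slab code are not reproduced.

#include <stdio.h>
#include <stdlib.h>

#define NR_CPUS		4
#define AC_ENTRIES	8

/* Userspace stand-in for struct array_cache: a tiny per-CPU stack of
 * free objects. */
struct array_cache {
	unsigned int avail;		/* objects currently cached */
	void *entry[AC_ENTRIES];
};

struct kmem_cache {
	size_t object_size;
	struct array_cache *cpu_cache;	/* one array_cache per CPU */
};

/* Analogue of alloc_kmem_cache_cpus(): one array_cache per CPU. */
static int alloc_kmem_cache_cpus(struct kmem_cache *cachep)
{
	cachep->cpu_cache = calloc(NR_CPUS, sizeof(*cachep->cpu_cache));
	return cachep->cpu_cache ? 0 : -1;
}

/* Fast path: pop from this CPU's cache; fall back to the allocator. */
static void *cache_alloc(struct kmem_cache *cachep, int cpu)
{
	struct array_cache *ac = &cachep->cpu_cache[cpu];

	if (ac->avail)
		return ac->entry[--ac->avail];
	return malloc(cachep->object_size);	/* "slow path" stand-in */
}

/* Fast path: push onto this CPU's cache; free when it overflows. */
static void cache_free(struct kmem_cache *cachep, int cpu, void *obj)
{
	struct array_cache *ac = &cachep->cpu_cache[cpu];

	if (ac->avail < AC_ENTRIES)
		ac->entry[ac->avail++] = obj;
	else
		free(obj);
}

int main(void)
{
	struct kmem_cache c = { .object_size = 64 };

	if (alloc_kmem_cache_cpus(&c))
		return 1;
	void *obj = cache_alloc(&c, 0);
	cache_free(&c, 0, obj);		/* lands in CPU 0's array_cache */
	printf("cpu0 cached objects: %u\n", c.cpu_cache[0].avail);
	free(c.cpu_cache);
	return 0;
}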
119 struct cpu_cache_fns cpu_cache __read_mostly;
626 cpu_cache = *list->cache; in setup_processor()