cache_op          141 arch/arc/kernel/perf_event.c 	unsigned int cache_type, cache_op, cache_result;
cache_op          145 arch/arc/kernel/perf_event.c 	cache_op	= (config >>  8) & 0xff;
cache_op          149 arch/arc/kernel/perf_event.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op          154 arch/arc/kernel/perf_event.c 	ret = arc_pmu_cache_map[cache_type][cache_op][cache_result];
cache_op          160 arch/arc/kernel/perf_event.c 		 cache_type, cache_op, cache_result, ret,
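
The arc hits above are one instance of a decode pattern that recurs throughout this list (csky, mips, nds32, sparc, x86, xtensa, and drivers/perf below): a PERF_TYPE_HW_CACHE config packs the cache type in byte 0, the operation in byte 1, and the result in byte 2, and each field is bounds-checked before indexing a per-architecture event map. A condensed sketch of the shape, assuming arc's map layout and a CACHE_OP_UNSUPPORTED-style sentinel for unimplemented entries:

	static int arc_pmu_cache_event(u64 config)
	{
		unsigned int cache_type, cache_op, cache_result;
		int ret;

		cache_type   = (config >>  0) & 0xff;	/* byte 0: which cache */
		cache_op     = (config >>  8) & 0xff;	/* byte 1: read/write/prefetch */
		cache_result = (config >> 16) & 0xff;	/* byte 2: access/miss */

		if (cache_type >= PERF_COUNT_HW_CACHE_MAX)
			return -EINVAL;
		if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
			return -EINVAL;
		if (cache_result >= PERF_COUNT_HW_CACHE_RESULT_MAX)
			return -EINVAL;

		ret = arc_pmu_cache_map[cache_type][cache_op][cache_result];
		if (ret == CACHE_OP_UNSUPPORTED)	/* hole in the map */
			return -ENOENT;

		return ret;
	}
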
cache_op          952 arch/csky/kernel/perf_event.c 	unsigned int cache_type, cache_op, cache_result;
cache_op          955 arch/csky/kernel/perf_event.c 	cache_op	= (config >>  8) & 0xff;
cache_op          960 arch/csky/kernel/perf_event.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op          965 arch/csky/kernel/perf_event.c 	return csky_pmu_cache_map[cache_type][cache_op][cache_result];
cache_op           58 arch/csky/mm/dma-mapping.c 	cache_op(page_to_phys(page), size, dma_wbinv_set_zero_range);
cache_op           66 arch/csky/mm/dma-mapping.c 		cache_op(paddr, size, dma_wb_range);
cache_op           70 arch/csky/mm/dma-mapping.c 		cache_op(paddr, size, dma_wbinv_range);
cache_op           85 arch/csky/mm/dma-mapping.c 		cache_op(paddr, size, dma_inv_range);
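
In arch/csky/mm/dma-mapping.c, cache_op is a different symbol from the perf local above: a helper that applies one of the low-level maintenance routines (dma_wb_range, dma_wbinv_range, dma_inv_range) to a physical range on behalf of the streaming DMA API. A simplified sketch, assuming a lowmem linear mapping (the in-tree helper also walks the range page by page so it can kmap highmem):

	static void cache_op(phys_addr_t paddr, size_t size,
			     void (*fn)(unsigned long start, unsigned long end))
	{
		unsigned long start = (unsigned long)phys_to_virt(paddr);

		/* Apply the selected maintenance routine to the whole range. */
		fn(start, start + size);
	}

The callers at lines 66, 70 and 85 pick the routine by DMA direction: writeback before the device reads a buffer, invalidate before the CPU reads one back, writeback-invalidate for the bidirectional case.
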
cache_op           98 arch/mips/include/asm/bmips.h 	cache_op(Index_Load_Tag_S, ZSCM_REG_BASE + offset);
cache_op          119 arch/mips/include/asm/bmips.h 	cache_op(Index_Store_Tag_S, ZSCM_REG_BASE + offset);
cache_op           54 arch/mips/include/asm/r4kcache.h 	cache_op(Index_Invalidate_I, addr);
cache_op           59 arch/mips/include/asm/r4kcache.h 	cache_op(Index_Writeback_Inv_D, addr);
cache_op           64 arch/mips/include/asm/r4kcache.h 	cache_op(Index_Writeback_Inv_SD, addr);
cache_op           71 arch/mips/include/asm/r4kcache.h 		cache_op(Hit_Invalidate_I_Loongson2, addr);
cache_op           75 arch/mips/include/asm/r4kcache.h 		cache_op(Hit_Invalidate_I, addr);
cache_op           82 arch/mips/include/asm/r4kcache.h 	cache_op(Hit_Writeback_Inv_D, addr);
cache_op           87 arch/mips/include/asm/r4kcache.h 	cache_op(Hit_Invalidate_D, addr);
cache_op           92 arch/mips/include/asm/r4kcache.h 	cache_op(Hit_Invalidate_SD, addr);
cache_op           97 arch/mips/include/asm/r4kcache.h 	cache_op(Hit_Writeback_Inv_SD, addr);
cache_op          193 arch/mips/include/asm/r4kcache.h 	cache_op(Page_Invalidate_T, addr);
cache_op          624 arch/mips/include/asm/r4kcache.h 		prot##cache_op(hitop, addr);				\
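
Nearly everywhere else under arch/mips, cache_op() is the r4kcache.h macro wrapping the CACHE instruction. The first operand is an immediate from asm/cacheops.h whose low bits select the cache (I, D, S/SD, T) and whose remaining bits select the operation; Index_* variants address a line by set and way, Hit_* variants by address. A condensed version of the macro (the in-tree one also forces the right ISA level inside the asm):

	#define cache_op(op, addr)						\
		__asm__ __volatile__(						\
		"	.set	push					\n"	\
		"	.set	noreorder				\n"	\
		"	cache	%0, %1					\n"	\
		"	.set	pop					\n"	\
		:								\
		: "i" (op), "R" (*(unsigned char *)(addr)))
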
cache_op          208 arch/mips/kernel/mips-mt.c 		cache_op(Index_Load_Tag_D, INDEX_8);
cache_op          218 arch/mips/kernel/mips-mt.c 		cache_op(Index_Store_Tag_D, INDEX_8);
cache_op          223 arch/mips/kernel/mips-mt.c 		cache_op(Index_Store_Tag_D, INDEX_0);
cache_op          684 arch/mips/kernel/perf_event_mipsxx.c 	unsigned int cache_type, cache_op, cache_result;
cache_op          691 arch/mips/kernel/perf_event_mipsxx.c 	cache_op = (config >> 8) & 0xff;
cache_op          692 arch/mips/kernel/perf_event_mipsxx.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op          701 arch/mips/kernel/perf_event_mipsxx.c 					[cache_op]
cache_op           54 arch/mips/kernel/spram.c 	cache_op(Index_Store_Tag_I, CKSEG0|offset);
cache_op           70 arch/mips/kernel/spram.c 	cache_op(Index_Load_Tag_I, CKSEG0 | offset);
cache_op           89 arch/mips/kernel/spram.c 	cache_op(Index_Store_Tag_D, CKSEG0 | offset);
cache_op          103 arch/mips/kernel/spram.c 	cache_op(Index_Load_Tag_D, CKSEG0 | offset);
cache_op         1952 arch/mips/kvm/emulate.c 	case cache_op:
cache_op         1156 arch/mips/kvm/vz.c 	case cache_op:
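
The two kvm hits are a third, unrelated use of the name: in asm/inst.h, cache_op is the enum major_op entry for the CACHE major opcode, and both emulate.c and vz.c match it in a switch on the trapped instruction's opcode field before handing off to the cache-emulation path. Schematically (neighboring entries elided):

	/* From asm/inst.h, condensed: CACHE occupies major opcode 0x2f. */
	enum major_op {
		spec_op = 0x00,
		/* ... */
		cache_op = 0x2f,
		/* ... */
	};
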
cache_op         1539 arch/mips/mm/c-r4k.c 	cache_op(Index_Store_Tag_I, begin);
cache_op         1540 arch/mips/mm/c-r4k.c 	cache_op(Index_Store_Tag_D, begin);
cache_op         1541 arch/mips/mm/c-r4k.c 	cache_op(Index_Store_Tag_SD, begin);
cache_op         1546 arch/mips/mm/c-r4k.c 		cache_op(Index_Load_Tag_SD, addr);
cache_op           41 arch/mips/mm/sc-mips.c 	cache_op(Hit_Writeback_Inv_SD, addr & almask);
cache_op           42 arch/mips/mm/sc-mips.c 	cache_op(Hit_Writeback_Inv_SD, (addr + size - 1) & almask);
cache_op           31 arch/mips/mm/sc-r5k.c 		cache_op(R5K_Page_Invalidate_S, start);
cache_op           55 arch/mips/mm/sc-r5k.c 		cache_op(R5K_Page_Invalidate_S, a);
cache_op           99 arch/mips/mm/sc-rm7k.c 		cache_op(Page_Invalidate_T, start);
cache_op          117 arch/mips/mm/sc-rm7k.c 		cache_op(Index_Store_Tag_T, CKSEG0ADDR(i));
cache_op          143 arch/mips/mm/sc-rm7k.c 		cache_op(Index_Store_Tag_SD, CKSEG0ADDR(i));
cache_op          210 arch/mips/mm/sc-rm7k.c 	cache_op(Index_Store_Tag_T, begin);
cache_op          215 arch/mips/mm/sc-rm7k.c 		cache_op(Index_Load_Tag_T, addr);
cache_op           68 arch/mips/mm/uasm-mips.c 	[insn_cache]	= {M(cache_op, 0, 0, 0, 0, 0),  RS | RT | SIMM},
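
The uasm hit reuses that opcode value to build the micro-assembler's template for emitted cache instructions: M() packs the opcode and register/function fields into an instruction word, and the RS | RT | SIMM flags tell the builder which operand fields to fill in at emit time. The packing macro is roughly:

	#define M(a, b, c, d, e, f)					\
		((a) << OP_SH						\
		 | (b) << RS_SH						\
		 | (c) << RT_SH						\
		 | (d) << RD_SH						\
		 | (e) << RE_SH						\
		 | (f))
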
cache_op           58 arch/mips/pmcs-msp71xx/msp_setup.c 		cache_op(Fill, iptr);
cache_op           74 arch/mips/sgi-ip22/ip28-berr.c 	cache_op(Index_Load_Tag_S, addr);
cache_op           77 arch/mips/sgi-ip22/ip28-berr.c 	cache_op(Index_Load_Tag_S, addr | 1L);
cache_op           92 arch/mips/sgi-ip22/ip28-berr.c 		cache_op(Index_Load_Tag_D, addr);
cache_op           95 arch/mips/sgi-ip22/ip28-berr.c 		cache_op(Index_Load_Tag_D, addr | 1L);
cache_op          108 arch/mips/sgi-ip22/ip28-berr.c 		cache_op(Index_Load_Tag_I, addr);
cache_op          111 arch/mips/sgi-ip22/ip28-berr.c 		cache_op(Index_Load_Tag_I, addr | 1L);
cache_op          161 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 0);
cache_op          162 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 1);
cache_op          163 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 2);
cache_op          164 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 3);
cache_op          210 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 0);
cache_op          211 arch/mips/txx9/generic/setup.c 		cache_op(Index_Writeback_Inv_D, addr | 1);
cache_op           57 arch/nds32/kernel/dma.c 		cache_op(paddr, size, cpu_dma_wb_range);
cache_op           72 arch/nds32/kernel/dma.c 		cache_op(paddr, size, cpu_dma_inval_range);
cache_op           81 arch/nds32/kernel/dma.c 	cache_op(page_to_phys(page), size, cpu_dma_wbinval_range);
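
The nds32 helper mirrors the csky one sketched earlier: the same cache_op(paddr, size, fn) shape, with cpu_dma_wb_range, cpu_dma_inval_range and cpu_dma_wbinval_range selected by DMA direction.
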
cache_op           46 arch/nds32/kernel/perf_event_cpu.c 	unsigned int cache_type, cache_op, cache_result, ret;
cache_op           52 arch/nds32/kernel/perf_event_cpu.c 	cache_op = (config >> 8) & 0xff;
cache_op           53 arch/nds32/kernel/perf_event_cpu.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op           60 arch/nds32/kernel/perf_event_cpu.c 	ret = (int)(*cache_map)[cache_type][cache_op][cache_result];
cache_op         1198 arch/sparc/kernel/perf_event.c 	unsigned int cache_type, cache_op, cache_result;
cache_op         1208 arch/sparc/kernel/perf_event.c 	cache_op = (config >>  8) & 0xff;
cache_op         1209 arch/sparc/kernel/perf_event.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op         1216 arch/sparc/kernel/perf_event.c 	pmap = &((*sparc_pmu->cache_map)[cache_type][cache_op][cache_result]);
cache_op          303 arch/x86/events/core.c 	unsigned int cache_type, cache_op, cache_result;
cache_op          313 arch/x86/events/core.c 	cache_op = (config >>  8) & 0xff;
cache_op          314 arch/x86/events/core.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op          316 arch/x86/events/core.c 	cache_op = array_index_nospec(cache_op, PERF_COUNT_HW_CACHE_OP_MAX);
cache_op          323 arch/x86/events/core.c 	val = hw_cache_event_ids[cache_type][cache_op][cache_result];
cache_op          332 arch/x86/events/core.c 	attr->config1 = hw_cache_extra_regs[cache_type][cache_op][cache_result];
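
The x86 copy of the decode is the only one in this list hardened against Spectre v1: line 316 shows array_index_nospec() (from linux/nospec.h) clamping cache_op after the bounds check, so a speculatively out-of-bounds index can never reach the table lookups. Condensed from the lines above (cache_type and cache_result get the same treatment in the surrounding code):

	cache_op = (config >> 8) & 0xff;
	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
		return -EINVAL;
	/* Clamp the index even on the speculative path. */
	cache_op = array_index_nospec(cache_op, PERF_COUNT_HW_CACHE_OP_MAX);

	val = hw_cache_event_ids[cache_type][cache_op][cache_result];
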
cache_op          106 arch/xtensa/kernel/perf_event.c 	unsigned int cache_type, cache_op, cache_result;
cache_op          110 arch/xtensa/kernel/perf_event.c 	cache_op = (config >>  8) & 0xff;
cache_op          114 arch/xtensa/kernel/perf_event.c 	    cache_op >= C(OP_MAX) ||
cache_op          118 arch/xtensa/kernel/perf_event.c 	ret = xtensa_cache_ctl[cache_type][cache_op][cache_result];
cache_op           47 drivers/perf/arm_pmu.c 	unsigned int cache_type, cache_op, cache_result, ret;
cache_op           53 drivers/perf/arm_pmu.c 	cache_op = (config >>  8) & 0xff;
cache_op           54 drivers/perf/arm_pmu.c 	if (cache_op >= PERF_COUNT_HW_CACHE_OP_MAX)
cache_op           64 drivers/perf/arm_pmu.c 	ret = (int)(*cache_map)[cache_type][cache_op][cache_result];
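
drivers/perf/arm_pmu.c factors the same decode once for all ARM PMU drivers: each driver supplies its own cache_map (note the pointer indirection at line 64), and entries left as CACHE_OP_UNSUPPORTED mark events the PMU cannot count.
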
cache_op          414 tools/perf/util/parse-events.c 	int cache_type = -1, cache_op = -1, cache_result = -1;
cache_op          435 tools/perf/util/parse-events.c 		if (cache_op == -1) {
cache_op          436 tools/perf/util/parse-events.c 			cache_op = parse_aliases(str, perf_evsel__hw_cache_op,
cache_op          438 tools/perf/util/parse-events.c 			if (cache_op >= 0) {
cache_op          439 tools/perf/util/parse-events.c 				if (!perf_evsel__is_cache_op_valid(cache_type, cache_op))
cache_op          456 tools/perf/util/parse-events.c 	if (cache_op == -1)
cache_op          457 tools/perf/util/parse-events.c 		cache_op = PERF_COUNT_HW_CACHE_OP_READ;
cache_op          466 tools/perf/util/parse-events.c 	attr.config = cache_type | (cache_op << 8) | (cache_result << 16);
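
The tools/perf hits close the loop from userspace: parse-events.c turns an event string such as "L1-dcache-load-misses" into the same packed config that every kernel-side decoder above unpacks, defaulting the op to READ when none was named (lines 456-457). Written out, with the attr setup that surrounds line 466 assumed:

	if (cache_op == -1)
		cache_op = PERF_COUNT_HW_CACHE_OP_READ;

	attr.type   = PERF_TYPE_HW_CACHE;
	/* byte 0: type, byte 1: op, byte 2: result */
	attr.config = cache_type | (cache_op << 8) | (cache_result << 16);
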