arch/alpha/include/asm/cache.h:21: #define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/arc/include/asm/cache.h:17: #define CACHE_LINE_MASK (~(L1_CACHE_BYTES - 1))
arch/arc/mm/cache.c:275: num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);
arch/arc/mm/cache.c:282: paddr += L1_CACHE_BYTES;
arch/arc/mm/cache.c:320: num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);
arch/arc/mm/cache.c:342: paddr += L1_CACHE_BYTES;
arch/arc/mm/cache.c:346: vaddr += L1_CACHE_BYTES;
arch/arc/mm/cache.c:390: num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES);
arch/arc/mm/cache.c:410: paddr += L1_CACHE_BYTES;
arch/arc/mm/cache.c:443: sz += L1_CACHE_BYTES - 1;
arch/arc/mm/cache.c:1226: if (ic->line_len != L1_CACHE_BYTES)
arch/arc/mm/cache.c:1228: ic->line_len, L1_CACHE_BYTES);
arch/arc/mm/cache.c:1246: if (dc->line_len != L1_CACHE_BYTES)
arch/arc/mm/cache.c:1248: dc->line_len, L1_CACHE_BYTES);
arch/arc/mm/cache.c:1270: BUILD_BUG_ON_MSG(L1_CACHE_BYTES > SMP_CACHE_BYTES,
arch/arm/common/bL_switcher.c:127: stack = PTR_ALIGN(stack, L1_CACHE_BYTES);
arch/arm/include/asm/cache.h:18: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/arm/kernel/bios32.c:343: L1_CACHE_BYTES >> 2);
arch/arm/kernel/module-plts.c:14: #define PLT_ENT_STRIDE L1_CACHE_BYTES
arch/arm/kernel/module-plts.c:243: mod->arch.core.plt->sh_addralign = L1_CACHE_BYTES;
arch/arm/kernel/module-plts.c:250: mod->arch.init.plt->sh_addralign = L1_CACHE_BYTES;
arch/arm64/kernel/module-plts.c:316: pltsec->sh_addralign = L1_CACHE_BYTES;
arch/arm64/kernel/module-plts.c:324: pltsec->sh_addralign = L1_CACHE_BYTES;
arch/c6x/include/asm/cache.h:45: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/c6x/include/asm/cache.h:46: #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
arch/csky/include/asm/cache.h:11: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/csky/mm/cachev1.c:67: i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev1.c:68: for (; i < end; i += L1_CACHE_BYTES) {
arch/csky/mm/cachev2.c:17: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:19: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:32: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:34: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:41: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:43: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:50: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:52: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:56: i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:57: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:65: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:67: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:74: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:76: for (; i < end; i += L1_CACHE_BYTES)
arch/csky/mm/cachev2.c:83: unsigned long i = start & ~(L1_CACHE_BYTES - 1);
arch/csky/mm/cachev2.c:85: for (; i < end; i += L1_CACHE_BYTES)
arch/hexagon/include/asm/cache.h:15: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/hexagon/include/asm/cache.h:17: #define __cacheline_aligned __aligned(L1_CACHE_BYTES)
arch/hexagon/include/asm/cache.h:18: #define ____cacheline_aligned __aligned(L1_CACHE_BYTES)
arch/ia64/include/asm/cache.h:17: # define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/ia64/include/asm/processor.h:659: #define PREFETCH_STRIDE L1_CACHE_BYTES
arch/ia64/kernel/minstate.h:85: adds r17=2*L1_CACHE_BYTES,r1; /* really: biggest cache-line size */ \
arch/ia64/kernel/minstate.h:88: lfetch.fault.excl.nt1 [r17],L1_CACHE_BYTES; \
arch/ia64/mm/discontig.c:119: pernodesize += node * L1_CACHE_BYTES;
arch/ia64/mm/discontig.c:271: pernode += node * L1_CACHE_BYTES;
arch/m68k/include/asm/cache.h:12: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/microblaze/include/asm/cache.h:22: #define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/microblaze/include/asm/page.h:43: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/microblaze/include/asm/page.h:45: #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
arch/mips/include/asm/mach-tx49xx/kmalloc.h:5: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/mips/pmcs-msp71xx/msp_setup.c:56: for (iptr = (void *)((unsigned int)start & ~(L1_CACHE_BYTES - 1));
arch/mips/pmcs-msp71xx/msp_setup.c:57: iptr < end; iptr += L1_CACHE_BYTES)
arch/nds32/include/asm/cache.h:10: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/nds32/kernel/setup.c:123: if (L1_cache_info[DCACHE].size != L1_CACHE_BYTES)
arch/nds32/kernel/setup.c:126: L1_cache_info[DCACHE].size, L1_CACHE_BYTES);
arch/nios2/include/asm/cache.h:21: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/openrisc/mm/cache.c:22: unsigned long line = paddr & ~(L1_CACHE_BYTES - 1);
arch/openrisc/mm/cache.c:26: line += L1_CACHE_BYTES;
arch/parisc/include/asm/atomic.h:30: # define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) (a))/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE-1) ]))
arch/parisc/include/asm/cache.h:21: #define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/parisc/include/asm/cache.h:23: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/powerpc/include/asm/cache.h:32: #define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/powerpc/include/asm/cache.h:85: return L1_CACHE_BYTES;
arch/powerpc/include/asm/cache.h:95: return L1_CACHE_BYTES;
arch/powerpc/include/asm/page_32.h:16: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/powerpc/include/asm/page_32.h:54: WARN_ON((unsigned long)addr & (L1_CACHE_BYTES - 1));
arch/powerpc/include/asm/page_32.h:56: for (i = 0; i < PAGE_SIZE / L1_CACHE_BYTES; i++, addr += L1_CACHE_BYTES)
arch/powerpc/kernel/misc_32.S:365: #if L1_CACHE_BYTES >= 32
arch/powerpc/kernel/misc_32.S:367: #if L1_CACHE_BYTES >= 64
arch/powerpc/kernel/misc_32.S:370: #if L1_CACHE_BYTES >= 128
arch/powerpc/kernel/paca.c:157: s = alloc_paca_data(sizeof(*s), L1_CACHE_BYTES, limit, cpu);
arch/powerpc/kernel/paca.c:266: paca = alloc_paca_data(sizeof(struct paca_struct), L1_CACHE_BYTES,
arch/powerpc/kernel/vdso.c:732: vdso_data->dcache_block_size = L1_CACHE_BYTES;
arch/powerpc/kernel/vdso.c:734: vdso_data->icache_block_size = L1_CACHE_BYTES;
arch/powerpc/kvm/book3s_hv.c:462: if (addr & (L1_CACHE_BYTES - 1))
arch/powerpc/kvm/book3s_hv.c:510: if ((vpa & (L1_CACHE_BYTES - 1)) || !vpa)
arch/powerpc/kvm/book3s_hv_rm_mmu.c:958: for (i = 0; i < SZ_4K; i += L1_CACHE_BYTES, pa += L1_CACHE_BYTES)
arch/powerpc/lib/checksum_32.S:203: #if L1_CACHE_BYTES >= 32
arch/powerpc/lib/checksum_32.S:205: #if L1_CACHE_BYTES >= 64
arch/powerpc/lib/checksum_32.S:208: #if L1_CACHE_BYTES >= 128
arch/powerpc/lib/checksum_32.S:279: #if L1_CACHE_BYTES >= 32
arch/powerpc/lib/checksum_32.S:281: #if L1_CACHE_BYTES >= 64
arch/powerpc/lib/checksum_32.S:284: #if L1_CACHE_BYTES >= 128
arch/powerpc/lib/copy_32.S:214: #if L1_CACHE_BYTES >= 32
arch/powerpc/lib/copy_32.S:216: #if L1_CACHE_BYTES >= 64
arch/powerpc/lib/copy_32.S:219: #if L1_CACHE_BYTES >= 128
arch/powerpc/lib/copy_32.S:393: #if L1_CACHE_BYTES >= 32
arch/powerpc/lib/copy_32.S:395: #if L1_CACHE_BYTES >= 64
arch/powerpc/lib/copy_32.S:398: #if L1_CACHE_BYTES >= 128
arch/powerpc/lib/copy_32.S:451: #if L1_CACHE_BYTES >= 32
arch/powerpc/lib/copy_32.S:453: #if L1_CACHE_BYTES >= 64
arch/powerpc/lib/copy_32.S:456: #if L1_CACHE_BYTES >= 128
arch/powerpc/lib/sstep.c:891: size = L1_CACHE_BYTES;
arch/powerpc/mm/dma-noncoherent.c:35: if ((start | end) & (L1_CACHE_BYTES - 1))
arch/powerpc/mm/mem.c:366: allow_read_from_user((const void __user *)addr, L1_CACHE_BYTES);
arch/powerpc/mm/mem.c:368: prevent_read_from_user((const void __user *)addr, L1_CACHE_BYTES);
arch/powerpc/platforms/powermac/pci.c:1008: L1_CACHE_BYTES >> 2);
arch/powerpc/xmon/xmon.c:1796: nflush = (nflush + L1_CACHE_BYTES - 1) / L1_CACHE_BYTES;
arch/powerpc/xmon/xmon.c:1802: for (; nflush > 0; --nflush, adrs += L1_CACHE_BYTES)
arch/powerpc/xmon/xmon.c:1805: for (; nflush > 0; --nflush, adrs += L1_CACHE_BYTES)
arch/riscv/kernel/module-sections.c:140: mod->arch.plt.shdr->sh_addralign = L1_CACHE_BYTES;
arch/riscv/kernel/module-sections.c:147: mod->arch.got.shdr->sh_addralign = L1_CACHE_BYTES;
arch/riscv/kernel/module-sections.c:154: mod->arch.got_plt.shdr->sh_addralign = L1_CACHE_BYTES;
arch/s390/lib/uaccess.c:437: offset = (size_t)src & (L1_CACHE_BYTES - 1);
arch/s390/lib/uaccess.c:438: len = min(size - done, L1_CACHE_BYTES - offset);
arch/sh/include/asm/page.h:195: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/sh/include/asm/processor.h:92: } __attribute__ ((aligned(L1_CACHE_BYTES)));
arch/sh/include/asm/processor_32.h:191: #define PREFETCH_STRIDE L1_CACHE_BYTES
arch/sh/kernel/cpu/sh2/probe.c:38: boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh2a/probe.c:47: boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh3/probe.c:52: boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh4/probe.c:39: boot_cpu_data.icache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh4/probe.c:48: boot_cpu_data.dcache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh4/probe.c:245: boot_cpu_data.scache.linesz = L1_CACHE_BYTES;
arch/sh/kernel/cpu/sh5/probe.c:41: boot_cpu_data.icache.linesz = L1_CACHE_BYTES;
arch/sh/mm/cache-sh2.c:23: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:24: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2.c:25: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:26: for (v = begin; v < end; v+=L1_CACHE_BYTES) {
arch/sh/mm/cache-sh2.c:44: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:45: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2.c:46: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:48: for (v = begin; v < end; v+=L1_CACHE_BYTES)
arch/sh/mm/cache-sh2.c:75: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:76: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2.c:77: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2.c:79: for (v = begin; v < end; v+=L1_CACHE_BYTES)
arch/sh/mm/cache-sh2a.c:57: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:58: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2a.c:59: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:70: for (v = begin; v < end; v += L1_CACHE_BYTES) {
arch/sh/mm/cache-sh2a.c:78: for (v = begin; v < end; v += L1_CACHE_BYTES)
arch/sh/mm/cache-sh2a.c:97: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:98: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2a.c:99: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:104: for (v = begin; v < end; v+=L1_CACHE_BYTES) {
arch/sh/mm/cache-sh2a.c:127: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:128: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh2a.c:129: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:139: for (v = begin; v < end; v += L1_CACHE_BYTES)
arch/sh/mm/cache-sh2a.c:157: start = data->addr1 & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:158: end = (data->addr2 + L1_CACHE_BYTES-1) & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh2a.c:173: for (v = start; v < end; v += L1_CACHE_BYTES)
arch/sh/mm/cache-sh3.c:40: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh3.c:41: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh3.c:42: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh3.c:44: for (v = begin; v < end; v+=L1_CACHE_BYTES) {
arch/sh/mm/cache-sh3.c:78: begin = (unsigned long)start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh3.c:79: end = ((unsigned long)start + size + L1_CACHE_BYTES-1)
arch/sh/mm/cache-sh3.c:80: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh3.c:82: for (v = begin; v < end; v+=L1_CACHE_BYTES) {
arch/sh/mm/cache-sh4.c:60: start &= ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh4.c:61: end += L1_CACHE_BYTES-1;
arch/sh/mm/cache-sh4.c:62: end &= ~(L1_CACHE_BYTES-1);
arch/sh/mm/cache-sh4.c:67: for (v = start; v < end; v += L1_CACHE_BYTES) {
arch/sh/mm/cache-sh5.c:83: addr += L1_CACHE_BYTES;
arch/sh/mm/cache-sh5.c:228: addr += L1_CACHE_BYTES;
arch/sh/mm/cache-sh5.c:234: #define DUMMY_ALLOCO_AREA_SIZE ((L1_CACHE_BYTES << 10) + (1024 * 4))
arch/sh/mm/cache-sh5.c:345: magic_eaddr += L1_CACHE_BYTES;
arch/sh/mm/cache-sh5.c:374: eaddr += L1_CACHE_BYTES;
arch/sh/mm/cache-sh5.c:599: unsigned long end = (unsigned long)vaddr + L1_CACHE_BYTES;
arch/sh/mm/cache-sh5.c:601: __flush_wback_region(vaddr, L1_CACHE_BYTES);
arch/sh/mm/flush-sh4.c:19: v = aligned_start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:20: end = (aligned_start + size + L1_CACHE_BYTES-1)
arch/sh/mm/flush-sh4.c:21: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:22: cnt = (end - v) / L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:25: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:26: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:27: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:28: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:29: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:30: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:31: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:32: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:37: __ocbwb(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:53: v = aligned_start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:54: end = (aligned_start + size + L1_CACHE_BYTES-1)
arch/sh/mm/flush-sh4.c:55: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:56: cnt = (end - v) / L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:59: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:60: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:61: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:62: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:63: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:64: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:65: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:66: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:70: __ocbp(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:83: v = aligned_start & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:84: end = (aligned_start + size + L1_CACHE_BYTES-1)
arch/sh/mm/flush-sh4.c:85: & ~(L1_CACHE_BYTES-1);
arch/sh/mm/flush-sh4.c:86: cnt = (end - v) / L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:89: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:90: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:91: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:92: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:93: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:94: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:95: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:96: __ocbi(v); v += L1_CACHE_BYTES;
arch/sh/mm/flush-sh4.c:101: __ocbi(v); v += L1_CACHE_BYTES;
arch/unicore32/include/asm/cache.h:22: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
arch/unicore32/include/asm/tlbflush.h:156: : : "r" (__pa(pmd) & ~(L1_CACHE_BYTES - 1)) : "cc");
arch/unicore32/kernel/pci.c:230: L1_CACHE_BYTES >> 2);
arch/x86/platform/uv/uv_time.c:293: offset = (uv_blade_processor_id() * L1_CACHE_BYTES) % PAGE_SIZE;
arch/xtensa/include/asm/cache.h:18: #define SMP_CACHE_BYTES L1_CACHE_BYTES
arch/xtensa/include/asm/cache.h:32: #define ARCH_DMA_MINALIGN L1_CACHE_BYTES

drivers/acpi/nfit/core.c:2445: base_offset = nfit_blk->bdw_offset + dpa % L1_CACHE_BYTES
drivers/acpi/nfit/mce.c:66: ALIGN(mce->addr, L1_CACHE_BYTES),
drivers/acpi/nfit/mce.c:67: L1_CACHE_BYTES);
drivers/ata/pata_hpt366.c:311: pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4));
drivers/ata/pata_hpt37x.c:911: pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4));
drivers/ata/pata_hpt3x2n.c:539: pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4));
drivers/block/skd_s1120.h:27: #define FIT_QCMD_ALIGN L1_CACHE_BYTES
drivers/char/uv_mmtimer.c:101: ret = ((uv_blade_processor_id() * L1_CACHE_BYTES) %
drivers/crypto/caam/caamrng.c:52: #define RN_BUF_SIZE (0xffff / L1_CACHE_BYTES * \
drivers/crypto/caam/caamrng.c:53: L1_CACHE_BYTES)
drivers/dma/mic_x100_dma.c:405: L1_CACHE_BYTES, DMA_BIDIRECTIONAL);
drivers/dma/mic_x100_dma.c:413: ch->status_dest = kzalloc(L1_CACHE_BYTES, GFP_KERNEL);
drivers/dma/mic_x100_dma.c:417: L1_CACHE_BYTES, DMA_BIDIRECTIONAL);
drivers/hid/intel-ish-hid/ishtp-fw-loader.c:526: (fw_info->ldr_capability.max_dma_buf_size % L1_CACHE_BYTES)) {
drivers/hid/intel-ish-hid/ishtp-fw-loader.c:655: payload_max_size &= ~(L1_CACHE_BYTES - 1);
drivers/hsi/clients/cmt_speech.c:891: hi->slot_size, hi->buf_size, L1_CACHE_BYTES);
drivers/hsi/clients/cmt_speech.c:896: data_start, sizeof(*hi->mmap_cfg), L1_CACHE_BYTES);
drivers/ide/hpt366.c:917: pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4));
drivers/ide/pmac.c:1480: if (pmif->broken_dma && cur_addr & (L1_CACHE_BYTES - 1)) {
drivers/infiniband/hw/hfi1/sdma.c:1483: dd->sdma_heads_size = L1_CACHE_BYTES * num_engines;
drivers/infiniband/hw/hfi1/sdma.c:1510: curr_head += L1_CACHE_BYTES;
drivers/infiniband/hw/hfi1/user_sdma.c:213: L1_CACHE_BYTES,
drivers/infiniband/hw/qib/qib_init.c:328: ((2 * L1_CACHE_BYTES +
drivers/infiniband/hw/qib/qib_init.c:329: dd->pioavregs * sizeof(u64)) & ~L1_CACHE_BYTES));
drivers/md/dm-table.c:28: #define NODE_SIZE L1_CACHE_BYTES
drivers/media/platform/rcar_jpu.c:47: #define JPU_JPEG_HDR_SIZE (ALIGN(0x258, L1_CACHE_BYTES))
drivers/misc/mic/scif/scif_dma.c:892: offset_in_ca = offset & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:894: loop_len = L1_CACHE_BYTES - offset_in_ca;
drivers/misc/mic/scif/scif_dma.c:923: tail_len = remaining_len & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:943: loop_len != L1_CACHE_BYTES) {
drivers/misc/mic/scif/scif_dma.c:959: L1_CACHE_BYTES,
drivers/misc/mic/scif/scif_dma.c:971: offset += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:972: temp_dma_addr += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:973: window_dma_addr += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:974: remaining_len -= (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1229: src_cache_off = src_offset & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:1232: loop_len = L1_CACHE_BYTES - src_cache_off;
drivers/misc/mic/scif/scif_dma.c:1278: tail_len = remaining_len & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:1317: L1_CACHE_BYTES,
drivers/misc/mic/scif/scif_dma.c:1329: src_offset += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1330: dst_offset += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1331: src_dma_addr += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1332: dst_dma_addr += (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1333: remaining_len -= (loop_len - L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1547: src_cache_off = src_offset & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:1548: dst_cache_off = dst_offset & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_dma.c:1566: if (work->len + (L1_CACHE_BYTES << 1) < SCIF_KMEM_UNALIGNED_BUF_SIZE) {
drivers/misc/mic/scif/scif_dma.c:1569: temp = kmalloc(work->len + (L1_CACHE_BYTES << 1),
drivers/misc/mic/scif/scif_dma.c:1575: if (!IS_ALIGNED((u64)temp, L1_CACHE_BYTES))
drivers/misc/mic/scif/scif_dma.c:1576: temp = PTR_ALIGN(temp, L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_dma.c:1594: work->len = ALIGN(work->len + src_cache_off, L1_CACHE_BYTES);
drivers/misc/mic/scif/scif_rma.h:68: (L1_CACHE_BYTES << 1))
drivers/misc/mic/scif/scif_rma.h:399: src_offset = src_offset & (L1_CACHE_BYTES - 1);
drivers/misc/mic/scif/scif_rma.h:400: dst_offset = dst_offset & (L1_CACHE_BYTES - 1);
drivers/misc/sgi-xp/xpc_main.c:388: *base = kzalloc(size + L1_CACHE_BYTES, flags);
drivers/misc/sgi-xp/xpc_partition.c:54: *base = kmalloc(size + L1_CACHE_BYTES, flags);
drivers/misc/sgi-xp/xpnet.c:120: #define XPNET_MAX_MTU (0x800000UL - L1_CACHE_BYTES)
drivers/misc/sgi-xp/xpnet.c:169: skb = dev_alloc_skb(msg->size + L1_CACHE_BYTES);
drivers/misc/sgi-xp/xpnet.c:172: msg->size + L1_CACHE_BYTES);
drivers/misc/sgi-xp/xpnet.c:186: skb_reserve(skb, (L1_CACHE_BYTES - ((u64)skb->data &
drivers/misc/sgi-xp/xpnet.c:187: (L1_CACHE_BYTES - 1)) +
drivers/misc/sgi-xp/xpnet.c:208: dst = (void *)((u64)skb->data & ~(L1_CACHE_BYTES - 1));
drivers/misc/sgi-xp/xpnet.c:444: start_addr = ((u64)skb->data & ~(L1_CACHE_BYTES - 1));
drivers/net/ethernet/atheros/ag71xx.c:248: L1_CACHE_BYTES)
drivers/net/ethernet/chelsio/cxgb3/sge.c:2376: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/chelsio/cxgb3/sge.c:2377: prefetch(addr + L1_CACHE_BYTES);
drivers/net/ethernet/chelsio/cxgb4/cxgb4_main.c:3817: t4_fixup_host_params(adapter, PAGE_SIZE, L1_CACHE_BYTES);
drivers/net/ethernet/hisilicon/hns/hns_enet.c:427: truesize = ALIGN(size, L1_CACHE_BYTES);
drivers/net/ethernet/hisilicon/hns/hns_enet.c:561: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/hisilicon/hns/hns_enet.c:562: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c:2903: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/hisilicon/hns3/hns3_enet.c:2904: prefetch(ring->va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/fm10k/fm10k_main.c:317: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/fm10k/fm10k_main.c:318: prefetch((void *)((u8 *)page_addr + L1_CACHE_BYTES));
drivers/net/ethernet/intel/i40e/i40e_txrx.c:2009: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/i40e/i40e_txrx.c:2010: prefetch(xdp->data + L1_CACHE_BYTES);
drivers/net/ethernet/intel/i40e/i40e_txrx.c:2095: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/i40e/i40e_txrx.c:2096: prefetch(xdp->data_meta + L1_CACHE_BYTES);
drivers/net/ethernet/intel/iavf/iavf_txrx.c:1313: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/iavf/iavf_txrx.c:1314: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/iavf/iavf_txrx.c:1380: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/iavf/iavf_txrx.c:1381: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/ice/ice_txrx.c:701: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/ice/ice_txrx.c:702: prefetch((u8 *)va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/igb/igb_main.c:8022: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/igb/igb_main.c:8023: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/igb/igb_main.c:8079: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/igb/igb_main.c:8080: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/igc/igc_main.c:1263: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/igc/igc_main.c:1264: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/igc/igc_main.c:1302: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/igc/igc_main.c:1303: prefetch(va + L1_CACHE_BYTES);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c:2101: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c:2102: prefetch(xdp->data + L1_CACHE_BYTES);
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c:2167: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/ixgbe/ixgbe_main.c:2168: prefetch(xdp->data_meta + L1_CACHE_BYTES);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c:873: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c:874: prefetch(xdp->data + L1_CACHE_BYTES);
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c:954: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/intel/ixgbevf/ixgbevf_main.c:955: prefetch(xdp->data_meta + L1_CACHE_BYTES);
drivers/net/ethernet/mellanox/mlx5/core/en/xdp.h:54: #if L1_CACHE_BYTES < 128
drivers/net/ethernet/microchip/lan743x_main.h:747: #define DEFAULT_DMA_DESCRIPTOR_SPACING (L1_CACHE_BYTES)
drivers/net/ethernet/neterion/vxge/vxge-main.c:375: prefetch((char *)dtr + L1_CACHE_BYTES);
drivers/net/ethernet/neterion/vxge/vxge-main.c:400: prefetch((char *)skb + L1_CACHE_BYTES);
drivers/net/ethernet/qlogic/qed/qed_dev.c:2566: cache_line_size = min_t(u32, L1_CACHE_BYTES, wr_mbs);
drivers/net/ethernet/qlogic/qed/qed_dev.c:2586: if (L1_CACHE_BYTES > wr_mbs)
drivers/net/ethernet/qlogic/qed/qed_dev.c:2589: L1_CACHE_BYTES, wr_mbs);
drivers/net/ethernet/qlogic/qed/qed_ll2.c:2433: L1_CACHE_BYTES + params->mtu;
drivers/net/ethernet/sfc/efx.c:661: BUILD_BUG_ON(EFX_RX_USR_BUF_SIZE % L1_CACHE_BYTES);
drivers/net/ethernet/sfc/falcon/efx.c:615: BUILD_BUG_ON(EF4_RX_USR_BUF_SIZE % L1_CACHE_BYTES);
drivers/net/ethernet/sfc/falcon/net_driver.h:89: #define EF4_RX_BUF_ALIGNMENT L1_CACHE_BYTES
drivers/net/ethernet/sfc/net_driver.h:89: #define EFX_RX_BUF_ALIGNMENT L1_CACHE_BYTES
drivers/net/ethernet/sfc/tx.c:28: #define EFX_PIOBUF_SIZE_DEF ALIGN(256, L1_CACHE_BYTES)
drivers/net/ethernet/sfc/tx.c:195: u8 buf[L1_CACHE_BYTES];
drivers/net/ethernet/sfc/tx.c:313: BUILD_BUG_ON(L1_CACHE_BYTES >
drivers/net/ethernet/sfc/tx.c:316: ALIGN(skb->len, L1_CACHE_BYTES) >> 3);
drivers/net/virtio_net.c:1162: return ALIGN(len, L1_CACHE_BYTES);
drivers/net/wireless/ath/ath5k/ahb.c:32: *csz = L1_CACHE_BYTES >> 2;
drivers/net/wireless/ath/ath5k/pci.c:70: *csz = L1_CACHE_BYTES >> 2; /* Use the default size */
drivers/net/wireless/ath/ath5k/pci.c:213: csz = L1_CACHE_BYTES >> 2;
drivers/net/wireless/ath/ath6kl/init.c:225: reserved = roundup((2 * L1_CACHE_BYTES) + ATH6KL_DATA_OFFSET +
drivers/net/wireless/ath/ath6kl/init.c:230: skb_reserve(skb, reserved - L1_CACHE_BYTES);
drivers/net/wireless/ath/ath6kl/sdio.c:359: size = 2 * L1_CACHE_BYTES +
drivers/net/wireless/ath/ath9k/ahb.c:56: *csz = L1_CACHE_BYTES >> 2;
drivers/net/wireless/ath/ath9k/htc_drv_init.c:516: *csz = L1_CACHE_BYTES >> 2;
drivers/net/wireless/ath/ath9k/pci.c:923: csz = L1_CACHE_BYTES / sizeof(u32);
drivers/net/wireless/realtek/rtl818x/rtl8187/rtl8187.h:143: u8 dummy1[L1_CACHE_BYTES];
drivers/net/wireless/realtek/rtl818x/rtl8187/rtl8187.h:152: u8 dummy2[L1_CACHE_BYTES];
drivers/nvdimm/btt.c:1093: u32 idx = (premap * MAP_ENT_SIZE / L1_CACHE_BYTES) % arena->nfree;
drivers/nvdimm/btt.c:1101: u32 idx = (premap * MAP_ENT_SIZE / L1_CACHE_BYTES) % arena->nfree;
drivers/nvdimm/btt.h:128: u8 cacheline_padding[L1_CACHE_BYTES];
drivers/parisc/ccio-dma.c:763: if((size % L1_CACHE_BYTES) || ((unsigned long)addr % L1_CACHE_BYTES))
drivers/parisc/dino.c:545: PCI_CACHE_LINE_SIZE, 2, 0xff00 | L1_CACHE_BYTES/4);
drivers/parisc/sba_iommu.c:1722: &(sba_dev->ioc[i].res_map[L1_CACHE_BYTES]);
drivers/pci/pci.c:104: u8 pci_dfl_cache_line_size = L1_CACHE_BYTES >> 2;
drivers/pcmcia/yenta_socket.c:1047: config_writeb(socket, PCI_CACHE_LINE_SIZE, L1_CACHE_BYTES / 4);
drivers/s390/virtio/virtio_ccw.c:136: #define VIRTIO_IV_BITS (L1_CACHE_BYTES * 8)
drivers/scsi/csiostor/csio_wr.c:1313: uint32_t clsz = L1_CACHE_BYTES;
drivers/soc/fsl/qbman/dpaa_sys.h:71: #if (L1_CACHE_BYTES == 32)
drivers/soc/fsl/qbman/qman_test_stash.c:376: u8 res = (offset + (L1_CACHE_BYTES - 1))
drivers/soc/fsl/qbman/qman_test_stash.c:377: / (L1_CACHE_BYTES);
drivers/soc/fsl/qbman/qman_test_stash.c:572: sizeof(struct hp_handler), L1_CACHE_BYTES,

fs/dcache.c:112: hash += (unsigned long) parent / L1_CACHE_BYTES;
fs/file.c:121: 2 * nr / BITS_PER_BYTE + BITBIT_SIZE(nr), L1_CACHE_BYTES),
fs/inode.c:480: L1_CACHE_BYTES;
fs/namespace.c:92: unsigned long tmp = ((unsigned long)mnt / L1_CACHE_BYTES);
fs/namespace.c:93: tmp += ((unsigned long)dentry / L1_CACHE_BYTES);
fs/namespace.c:100: unsigned long tmp = ((unsigned long)dentry / L1_CACHE_BYTES);

include/linux/cache.h:9: #define L1_CACHE_ALIGN(x) __ALIGN_KERNEL(x, L1_CACHE_BYTES)
include/linux/cache.h:13: #define SMP_CACHE_BYTES L1_CACHE_BYTES
include/linux/cache.h:79: #define cache_line_size() L1_CACHE_BYTES
include/linux/iio/iio.h:676: #define IIO_ALIGN L1_CACHE_BYTES
include/linux/prefetch.h:51: #define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
include/linux/skbuff.h:2636: #define NET_SKB_PAD max(32, L1_CACHE_BYTES)

kernel/audit_tree.c:223: unsigned long n = key / L1_CACHE_BYTES;
kernel/fork.c:807: int align = max_t(int, L1_CACHE_BYTES, ARCH_MIN_TASKALIGN);

lib/atomic64.c:29: char pad[L1_CACHE_BYTES];
lib/btree.c:47: #define NODESIZE MAX(L1_CACHE_BYTES, 128)

mm/shmem.c:3642: L1_CACHE_BYTES), GFP_KERNEL);
mm/swapfile.c:2997: DIV_ROUND_UP(L1_CACHE_BYTES, sizeof(struct swap_cluster_info))

net/core/dev.c:2280: if (maps_sz < L1_CACHE_BYTES)
net/core/dev.c:2281: maps_sz = L1_CACHE_BYTES;
net/core/dev_addr_lists.c:28: if (alloc_size < L1_CACHE_BYTES)
net/core/dev_addr_lists.c:29: alloc_size = L1_CACHE_BYTES;
net/core/net-sysfs.c:729: RPS_MAP_SIZE(cpumask_weight(mask)), L1_CACHE_BYTES),
net/core/skbuff.c:2908: skb_headlen(from) < L1_CACHE_BYTES ||
net/ipv4/esp4.c:305: allocsize = ALIGN(tailen, L1_CACHE_BYTES);
net/ipv4/esp4.c:421: allocsize = ALIGN(skb->data_len, L1_CACHE_BYTES);
net/ipv4/fib_frontend.c:1529: size = max_t(size_t, size, L1_CACHE_BYTES);
net/ipv4/inet_hashtables.c:811: nblocks = max(2U * L1_CACHE_BYTES / locksz, 1U);
net/ipv6/esp6.c:250: allocsize = ALIGN(tailen, L1_CACHE_BYTES);
net/ipv6/esp6.c:362: allocsize = ALIGN(skb->data_len, L1_CACHE_BYTES);
net/ipv6/ip6_fib.c:2260: size = max_t(size_t, size, L1_CACHE_BYTES);
net/packet/internal.h:104: #define ROLLOVER_HLEN (L1_CACHE_BYTES / sizeof(u32))
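
The single most common idiom in the listing above is to round an address range out to cache-line boundaries and then walk it one line at a time (see, for example, the arch/sh/mm/flush-sh4.c and arch/csky/mm/cachev2.c entries). The following is a minimal userspace sketch of that idiom, not kernel code: the value 64 for L1_CACHE_BYTES and the helper name walk_cache_lines are illustrative assumptions only; in the kernel the constant comes from each architecture's <asm/cache.h> and the loop body would issue a cache-maintenance instruction rather than a printf.

#include <stdio.h>
#include <stdint.h>
#include <stddef.h>

#define L1_CACHE_BYTES 64UL   /* illustrative value; the kernel defines this per architecture */

/* Hypothetical helper, not a kernel function. */
static void walk_cache_lines(uintptr_t start, size_t size)
{
        uintptr_t v   = start & ~(L1_CACHE_BYTES - 1);      /* round start down to a line boundary */
        uintptr_t end = (start + size + L1_CACHE_BYTES - 1)
                        & ~(L1_CACHE_BYTES - 1);            /* round end up to a line boundary */

        for (; v < end; v += L1_CACHE_BYTES)
                printf("would operate on the cache line at 0x%lx\n", (unsigned long)v);
}

int main(void)
{
        /* A 100-byte range that deliberately straddles line boundaries. */
        walk_cache_lines(0x1000 + 7, 100);
        return 0;
}

The mask arithmetic relies on L1_CACHE_BYTES being a power of two; the same property underlies the other recurring uses in the listing, such as ALIGN(len, L1_CACHE_BYTES) for buffer sizing and PTR_ALIGN(ptr, L1_CACHE_BYTES) for pointer alignment.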