
Searched refs: L1_CACHE_BYTES (Results 1 – 191 of 191), sorted by relevance

/linux-4.1.27/arch/sh/mm/
Dflush-sh4.c18 v = aligned_start & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
19 end = (aligned_start + size + L1_CACHE_BYTES-1) in sh4__flush_wback_region()
20 & ~(L1_CACHE_BYTES-1); in sh4__flush_wback_region()
21 cnt = (end - v) / L1_CACHE_BYTES; in sh4__flush_wback_region()
24 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
25 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
26 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
27 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
28 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
29 __ocbwb(v); v += L1_CACHE_BYTES; in sh4__flush_wback_region()
[all …]
Dcache-sh2a.c58 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
59 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_wback_region()
60 & ~(L1_CACHE_BYTES-1); in sh2a__flush_wback_region()
71 for (v = begin; v < end; v += L1_CACHE_BYTES) { in sh2a__flush_wback_region()
79 for (v = begin; v < end; v += L1_CACHE_BYTES) in sh2a__flush_wback_region()
98 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
99 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2a__flush_purge_region()
100 & ~(L1_CACHE_BYTES-1); in sh2a__flush_purge_region()
105 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2a__flush_purge_region()
128 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2a__flush_invalidate_region()
[all …]
Dcache-sh2.c24 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
25 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_wback_region()
26 & ~(L1_CACHE_BYTES-1); in sh2__flush_wback_region()
27 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh2__flush_wback_region()
45 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
46 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_purge_region()
47 & ~(L1_CACHE_BYTES-1); in sh2__flush_purge_region()
49 for (v = begin; v < end; v+=L1_CACHE_BYTES) in sh2__flush_purge_region()
76 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh2__flush_invalidate_region()
77 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh2__flush_invalidate_region()
[all …]
Dcache-sh3.c41 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
42 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_wback_region()
43 & ~(L1_CACHE_BYTES-1); in sh3__flush_wback_region()
45 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_wback_region()
79 begin = (unsigned long)start & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
80 end = ((unsigned long)start + size + L1_CACHE_BYTES-1) in sh3__flush_purge_region()
81 & ~(L1_CACHE_BYTES-1); in sh3__flush_purge_region()
83 for (v = begin; v < end; v+=L1_CACHE_BYTES) { in sh3__flush_purge_region()
Dcache-sh5.c83 addr += L1_CACHE_BYTES; in sh64_icache_inv_kernel_range()
228 addr += L1_CACHE_BYTES; in sh64_icache_inv_current_user_range()
234 #define DUMMY_ALLOCO_AREA_SIZE ((L1_CACHE_BYTES << 10) + (1024 * 4))
345 magic_eaddr += L1_CACHE_BYTES; in sh64_dcache_purge_coloured_phy_page()
374 eaddr += L1_CACHE_BYTES; in sh64_dcache_purge_phy_page()
599 unsigned long end = (unsigned long)vaddr + L1_CACHE_BYTES; in sh5_flush_cache_sigtramp()
601 __flush_wback_region(vaddr, L1_CACHE_BYTES); in sh5_flush_cache_sigtramp()
Dcache-sh4.c60 start &= ~(L1_CACHE_BYTES-1); in sh4_flush_icache_range()
61 end += L1_CACHE_BYTES-1; in sh4_flush_icache_range()
62 end &= ~(L1_CACHE_BYTES-1); in sh4_flush_icache_range()
67 for (v = start; v < end; v += L1_CACHE_BYTES) { in sh4_flush_icache_range()
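The sh/mm hits above all repeat the same idiom: round the start address down to a cache-line boundary, round the end up, and walk the region one L1_CACHE_BYTES line at a time. A minimal user-space C sketch of that arithmetic follows; the 32-byte line size and the example addresses are assumptions for illustration only, not any real architecture's values.

/* Sketch of the round-down / round-up idiom used by the SH flush routines. */
#include <stdio.h>

#define L1_CACHE_BYTES 32	/* example value only */

int main(void)
{
	unsigned long start = 0x1005, size = 0x123;
	unsigned long begin, end, lines;

	begin = start & ~(unsigned long)(L1_CACHE_BYTES - 1);	/* round down */
	end = (start + size + L1_CACHE_BYTES - 1)
		& ~(unsigned long)(L1_CACHE_BYTES - 1);		/* round up */
	lines = (end - begin) / L1_CACHE_BYTES;			/* lines to operate on */

	printf("begin=%#lx end=%#lx lines=%lu\n", begin, end, lines);
	return 0;
}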
/linux-4.1.27/arch/mn10300/include/asm/
Dcache.h19 #define L1_CACHE_DISPARITY (L1_CACHE_NENTRIES * L1_CACHE_BYTES)
21 #define L1_CACHE_DISPARITY L1_CACHE_NENTRIES * L1_CACHE_BYTES
24 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
33 (ENTRY) * L1_CACHE_BYTES, u32)
36 __SYSREG(0xc8400000 + 0 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
38 __SYSREG(0xc8400000 + 1 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
40 __SYSREG(0xc8400000 + 2 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
42 __SYSREG(0xc8400000 + 3 * L1_CACHE_WAYDISP + (ENTRY) * L1_CACHE_BYTES, u32)
47 (ENTRY) * L1_CACHE_BYTES + (OFF) * 4, u32)
50 (ENTRY) * L1_CACHE_BYTES, u32)
[all …]
/linux-4.1.27/arch/score/mm/
Dcache.c45 for (i = 0; i < (PAGE_SIZE / L1_CACHE_BYTES); i += L1_CACHE_BYTES) { in flush_data_cache_page()
51 addr += L1_CACHE_BYTES; in flush_data_cache_page()
250 start = start & ~(L1_CACHE_BYTES - 1); in flush_dcache_range()
251 end = end & ~(L1_CACHE_BYTES - 1); in flush_dcache_range()
254 for (i = 0; i < size; i += L1_CACHE_BYTES) { in flush_dcache_range()
261 start += L1_CACHE_BYTES; in flush_dcache_range()
268 start = start & ~(L1_CACHE_BYTES - 1); in flush_icache_range()
269 end = end & ~(L1_CACHE_BYTES - 1); in flush_icache_range()
273 for (i = 0; i < size; i += L1_CACHE_BYTES) { in flush_icache_range()
278 start += L1_CACHE_BYTES; in flush_icache_range()
/linux-4.1.27/arch/frv/lib/
Dcache.S27 andi gr8,~(L1_CACHE_BYTES-1),gr8
30 addi gr8,#L1_CACHE_BYTES,gr8
47 andi gr8,~(L1_CACHE_BYTES-1),gr8
51 addi gr8,#L1_CACHE_BYTES,gr8
68 andi gr8,~(L1_CACHE_BYTES-1),gr8
71 addi gr8,#L1_CACHE_BYTES,gr8
88 andi gr8,~(L1_CACHE_BYTES-1),gr8
92 addi gr8,#L1_CACHE_BYTES,gr8
/linux-4.1.27/arch/hexagon/include/asm/
Dcache.h26 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
28 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
30 #define __cacheline_aligned __aligned(L1_CACHE_BYTES)
31 #define ____cacheline_aligned __aligned(L1_CACHE_BYTES)
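Most of the arch cache.h hits in this listing follow the same pattern as the hexagon header above: L1_CACHE_BYTES is derived from a per-architecture L1_CACHE_SHIFT, and the cacheline alignment attributes are built on top of it. The sketch below restates that pattern; the shift value of 5 is an assumption for the example, not any particular architecture's real value.

/* Typical arch cache.h shape (example values, GNU C attribute syntax). */
#define L1_CACHE_SHIFT		5
#define L1_CACHE_BYTES		(1 << L1_CACHE_SHIFT)	/* 32-byte lines */

#define ____cacheline_aligned	__attribute__((__aligned__(L1_CACHE_BYTES)))

/* Keep a frequently written field on its own cache line. */
struct hot_counter {
	unsigned long value;
} ____cacheline_aligned;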
/linux-4.1.27/arch/arm/lib/
Dcopy_page.S17 #define COPY_COUNT (PAGE_SZ / (2 * L1_CACHE_BYTES) PLD( -1 ))
30 PLD( pld [r1, #L1_CACHE_BYTES] )
33 1: PLD( pld [r1, #2 * L1_CACHE_BYTES])
34 PLD( pld [r1, #3 * L1_CACHE_BYTES])
36 .rept (2 * L1_CACHE_BYTES / 16 - 1)
/linux-4.1.27/arch/blackfin/include/asm/
Dcache.h17 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
18 #define SMP_CACHE_BYTES L1_CACHE_BYTES
20 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
32 __attribute__((__aligned__(L1_CACHE_BYTES), \
/linux-4.1.27/arch/parisc/include/asm/
Dcache.h18 #define L1_CACHE_BYTES 64 macro
21 #define L1_CACHE_BYTES 32 macro
27 #define SMP_CACHE_BYTES L1_CACHE_BYTES
29 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
Datomic.h29 # define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) (a))/L1_CACHE_BYTES) & (ATOMIC_HASH_SI…
/linux-4.1.27/arch/arc/kernel/
Dvmlinux.lds.S66 INIT_TEXT_SECTION(L1_CACHE_BYTES)
71 INIT_SETUP(L1_CACHE_BYTES)
83 PERCPU_SECTION(L1_CACHE_BYTES)
105 EXCEPTION_TABLE(L1_CACHE_BYTES)
115 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/linux-4.1.27/arch/alpha/include/asm/
Dcache.h10 # define L1_CACHE_BYTES 64 macro
16 # define L1_CACHE_BYTES 32 macro
20 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/linux-4.1.27/arch/frv/include/asm/
Dcache.h18 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
20 #define __cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))
21 #define ____cacheline_aligned __attribute__((aligned(L1_CACHE_BYTES)))
Dmem-layout.h38 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
39 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/arm64/include/asm/
Dcache.h22 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
31 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
40 return cwg ? 4 << cwg : L1_CACHE_BYTES; in cache_line_size()
/linux-4.1.27/arch/mn10300/proc-mn103e010/include/proc/
Dcache.h18 #define L1_CACHE_BYTES 16 /* bytes per entry */ macro
39 +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
41 +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
/linux-4.1.27/arch/mn10300/proc-mn2ws0050/include/proc/
Dcache.h24 #define L1_CACHE_BYTES 32 /* bytes per entry */ macro
45 +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
47 +(L1_CACHE_NWAYS * L1_CACHE_NENTRIES * L1_CACHE_BYTES)
/linux-4.1.27/arch/xtensa/include/asm/
Dcache.h17 #define L1_CACHE_BYTES XCHAL_DCACHE_LINESIZE macro
18 #define SMP_CACHE_BYTES L1_CACHE_BYTES
32 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/frv/kernel/
Dvmlinux.lds.S40 PERCPU_SECTION(L1_CACHE_BYTES)
95 CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
105 . = ALIGN(L1_CACHE_BYTES);
112 . = ALIGN(L1_CACHE_BYTES);
Dsleep.S96 addi gr3,#L1_CACHE_BYTES,gr3
130 .balign L1_CACHE_BYTES
282 addi gr3,#L1_CACHE_BYTES,gr3
307 addi gr3,#L1_CACHE_BYTES,gr3
313 .balign L1_CACHE_BYTES
367 addi gr3,#L1_CACHE_BYTES,gr3
Dcmode.S94 addi gr3,#L1_CACHE_BYTES,gr3
108 .balign L1_CACHE_BYTES
175 addi gr3,#L1_CACHE_BYTES,gr3
Dentry.S882 .balign L1_CACHE_BYTES
/linux-4.1.27/arch/unicore32/kernel/
Dvmlinux.lds.S33 PERCPU_SECTION(L1_CACHE_BYTES)
49 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
52 EXCEPTION_TABLE(L1_CACHE_BYTES)
Dpci.c234 L1_CACHE_BYTES >> 2); in pcibios_fixup_bus()
/linux-4.1.27/include/linux/
Dcache.h8 #define L1_CACHE_ALIGN(x) __ALIGN_KERNEL(x, L1_CACHE_BYTES)
12 #define SMP_CACHE_BYTES L1_CACHE_BYTES
64 #define cache_line_size() L1_CACHE_BYTES
Dprefetch.h50 #define PREFETCH_STRIDE (4*L1_CACHE_BYTES)
Dskbuff.h2031 #define NET_SKB_PAD max(32, L1_CACHE_BYTES)
Dnetdevice.h720 #define XPS_MIN_MAP_ALLOC ((L1_CACHE_BYTES - sizeof(struct xps_map)) \
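The include/linux/cache.h hit shows L1_CACHE_ALIGN(), which rounds a size up to the next multiple of L1_CACHE_BYTES via __ALIGN_KERNEL(). The sketch below uses a simplified but equivalent macro (for power-of-two alignments) rather than the exact kernel definition, and assumes a 64-byte line purely for the example.

/* Simplified stand-in for L1_CACHE_ALIGN(): round a size up to a full line. */
#include <stdio.h>

#define L1_CACHE_BYTES		64	/* example value only */
#define __ALIGN_KERNEL(x, a)	(((x) + (a) - 1) & ~((a) - 1))
#define L1_CACHE_ALIGN(x)	__ALIGN_KERNEL(x, L1_CACHE_BYTES)

int main(void)
{
	/* 100 bytes rounds up to two full 64-byte cache lines: 128. */
	printf("L1_CACHE_ALIGN(100) = %lu\n",
	       (unsigned long)L1_CACHE_ALIGN(100UL));
	return 0;
}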
/linux-4.1.27/arch/arc/include/asm/
Dcache.h19 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
20 #define CACHE_LINE_MASK (~(L1_CACHE_BYTES - 1))
50 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/nios2/kernel/
Dvmlinux.lds.S53 EXCEPTION_TABLE(L1_CACHE_BYTES)
59 PERCPU_SECTION(L1_CACHE_BYTES)
64 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/linux-4.1.27/arch/powerpc/kernel/
Dmisc_32.S353 li r5,L1_CACHE_BYTES-1
362 addi r3,r3,L1_CACHE_BYTES
368 addi r6,r6,L1_CACHE_BYTES
387 li r5,L1_CACHE_BYTES-1
396 addi r3,r3,L1_CACHE_BYTES
408 li r5,L1_CACHE_BYTES-1
417 addi r3,r3,L1_CACHE_BYTES
430 li r5,L1_CACHE_BYTES-1
439 addi r3,r3,L1_CACHE_BYTES
458 li r4,PAGE_SIZE/L1_CACHE_BYTES /* Number of lines in a page */
[all …]
Dvmlinux.lds.S155 PERCPU_SECTION(L1_CACHE_BYTES)
237 CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
241 READ_MOSTLY_DATA(L1_CACHE_BYTES)
Dcpu_setup_ppc970.S68 .balign L1_CACHE_BYTES,0
71 .balign L1_CACHE_BYTES,0
Dcpu_setup_6xx.S308 .balign L1_CACHE_BYTES
311 .balign L1_CACHE_BYTES,0
Dvdso.c775 vdso_data->dcache_block_size = L1_CACHE_BYTES; in vdso_init()
777 vdso_data->icache_block_size = L1_CACHE_BYTES; in vdso_init()
Dl2cr_6xx.S205 .balign L1_CACHE_BYTES
Dalign.c208 size = L1_CACHE_BYTES; in emulate_dcbz()
Dhead_32.S789 4: li r0,L1_CACHE_BYTES/4
/linux-4.1.27/arch/c6x/include/asm/
Dcache.h39 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
48 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
49 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/m68k/include/asm/
Dcache.h9 #define L1_CACHE_BYTES (1<< L1_CACHE_SHIFT) macro
11 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/unicore32/include/asm/
Dcache.h16 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
25 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
Dtlbflush.h159 : : "r" (__pa(pmd) & ~(L1_CACHE_BYTES - 1)) : "cc"); in clean_pmd_entry()
/linux-4.1.27/arch/microblaze/include/asm/
Dcache.h20 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
22 #define SMP_CACHE_BYTES L1_CACHE_BYTES
Dpage.h43 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
45 #define ARCH_SLAB_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/mips/include/asm/
Dcache.h15 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
18 #define SMP_CACHE_BYTES L1_CACHE_BYTES
/linux-4.1.27/arch/metag/include/asm/
Dcache.h6 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
16 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/arm/include/asm/
Dcache.h8 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
17 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/ia64/include/asm/
Dcache.h12 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
16 # define SMP_CACHE_BYTES L1_CACHE_BYTES
Dprocessor.h684 #define PREFETCH_STRIDE L1_CACHE_BYTES
/linux-4.1.27/arch/arc/mm/
Dcache_arc700.c164 if (ic->line_len != L1_CACHE_BYTES) in arc_cache_init()
166 ic->line_len, L1_CACHE_BYTES); in arc_cache_init()
180 if (dc->line_len != L1_CACHE_BYTES) in arc_cache_init()
182 dc->line_len, L1_CACHE_BYTES); in arc_cache_init()
235 num_lines = DIV_ROUND_UP(sz, L1_CACHE_BYTES); in __cache_line_loop()
251 paddr += L1_CACHE_BYTES; in __cache_line_loop()
255 vaddr += L1_CACHE_BYTES; in __cache_line_loop()
258 paddr += L1_CACHE_BYTES; in __cache_line_loop()
/linux-4.1.27/arch/nios2/include/asm/
Dcache.h29 #define L1_CACHE_BYTES NIOS2_ICACHE_LINE_SIZE macro
31 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/avr32/include/asm/
Dcache.h5 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
14 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
Dprocessor.h163 #define PREFETCH_STRIDE L1_CACHE_BYTES
/linux-4.1.27/arch/powerpc/lib/
Dcopy_32.S68 CACHELINE_BYTES = L1_CACHE_BYTES
70 CACHELINE_MASK = (L1_CACHE_BYTES-1)
251 #if L1_CACHE_BYTES >= 32
253 #if L1_CACHE_BYTES >= 64
256 #if L1_CACHE_BYTES >= 128
309 #if L1_CACHE_BYTES >= 32
311 #if L1_CACHE_BYTES >= 64
314 #if L1_CACHE_BYTES >= 128
/linux-4.1.27/arch/ia64/sn/kernel/
Dbte.c27 #define L1_CACHE_MASK (L1_CACHE_BYTES - 1)
282 bteBlock_unaligned = kmalloc(len + 3 * L1_CACHE_BYTES, GFP_KERNEL); in bte_unaligned_copy()
324 (L1_CACHE_BYTES - in bte_unaligned_copy()
325 headBcopySrcOffset) ? L1_CACHE_BYTES in bte_unaligned_copy()
327 headBteLen = L1_CACHE_BYTES; in bte_unaligned_copy()
335 footBteLen = L1_CACHE_BYTES; in bte_unaligned_copy()
/linux-4.1.27/drivers/misc/sgi-xp/
Dxpnet.c120 #define XPNET_MAX_MTU (0x800000UL - L1_CACHE_BYTES)
167 skb = dev_alloc_skb(msg->size + L1_CACHE_BYTES); in xpnet_receive()
170 msg->size + L1_CACHE_BYTES); in xpnet_receive()
184 skb_reserve(skb, (L1_CACHE_BYTES - ((u64)skb->data & in xpnet_receive()
185 (L1_CACHE_BYTES - 1)) + in xpnet_receive()
206 dst = (void *)((u64)skb->data & ~(L1_CACHE_BYTES - 1)); in xpnet_receive()
458 start_addr = ((u64)skb->data & ~(L1_CACHE_BYTES - 1)); in xpnet_dev_hard_start_xmit()
Dxpc_sn2.c1352 u8 buffer[L1_CACHE_BYTES * 2]; in xpc_pull_remote_vars_part_sn2()
1365 DBUG_ON(sizeof(struct xpc_vars_part_sn2) != L1_CACHE_BYTES / 2); in xpc_pull_remote_vars_part_sn2()
1370 remote_entry_cacheline_pa = (remote_entry_pa & ~(L1_CACHE_BYTES - 1)); in xpc_pull_remote_vars_part_sn2()
1374 (L1_CACHE_BYTES - 1))); in xpc_pull_remote_vars_part_sn2()
1378 L1_CACHE_BYTES); in xpc_pull_remote_vars_part_sn2()
Dxpc_partition.c54 *base = kmalloc(size + L1_CACHE_BYTES, flags); in xpc_kmalloc_cacheline_aligned()
Dxpc_main.c396 *base = kzalloc(size + L1_CACHE_BYTES, flags); in xpc_kzalloc_cacheline_aligned()
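The sgi-xp hits above over-allocate by one cache line and then hand back an aligned pointer, keeping the raw pointer so it can later be freed. A hedged user-space sketch of that pattern follows; malloc() stands in for kmalloc()/kzalloc(), the 128-byte line size is an assumption for the example, and alloc_cacheline_aligned() is a hypothetical helper name. The caller is expected to free *base, not the aligned pointer.

/* Over-allocate, then round the pointer up to a cache-line boundary. */
#include <stdint.h>
#include <stdlib.h>

#define L1_CACHE_BYTES	128	/* example value only */

static void *alloc_cacheline_aligned(size_t size, void **base)
{
	*base = malloc(size + L1_CACHE_BYTES);
	if (!*base)
		return NULL;
	/* Aligned view of the buffer; *base is what eventually gets freed. */
	return (void *)(((uintptr_t)*base + L1_CACHE_BYTES - 1) &
			~(uintptr_t)(L1_CACHE_BYTES - 1));
}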
/linux-4.1.27/arch/metag/kernel/
Dvmlinux.lds.S35 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
52 PERCPU_SECTION(L1_CACHE_BYTES)
/linux-4.1.27/arch/alpha/kernel/
Dvmlinux.lds.S44 PERCPU_SECTION(L1_CACHE_BYTES)
53 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
/linux-4.1.27/arch/mn10300/mm/
Dcache-flush-by-reg.S118 # alignsize = L1_CACHE_BYTES;
119 # for (i = (end - start - 1) / L1_CACHE_BYTES ; i > 0; i >>= 1)
123 mov L1_CACHE_BYTES,d2
251 # alignsize = L1_CACHE_BYTES;
252 # for (i = (end - start - 1) / L1_CACHE_BYTES; i > 0; i >>= 1)
256 mov L1_CACHE_BYTES,d2
Dcache-inv-by-reg.S141 mov L1_CACHE_BYTES-1,d2
/linux-4.1.27/arch/powerpc/include/asm/
Dcache.h26 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
28 #define SMP_CACHE_BYTES L1_CACHE_BYTES
Dpage_32.h13 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/mn10300/lib/
D__lshrdi3.S15 .balign L1_CACHE_BYTES
44 .balign L1_CACHE_BYTES
D__ashldi3.S14 .balign L1_CACHE_BYTES
43 .balign L1_CACHE_BYTES
D__ashrdi3.S14 .balign L1_CACHE_BYTES
43 .balign L1_CACHE_BYTES
Dmemset.S14 .balign L1_CACHE_BYTES
Dmemcpy.S14 .balign L1_CACHE_BYTES
Ddo_csum.S14 .balign L1_CACHE_BYTES
Dmemmove.S15 .balign L1_CACHE_BYTES
/linux-4.1.27/arch/sh/kernel/
Dvmlinux.lds.S52 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, THREAD_SIZE)
69 PERCPU_SECTION(L1_CACHE_BYTES)
Dhead_64.S117 .balign L1_CACHE_BYTES
/linux-4.1.27/arch/blackfin/mach-common/
Dcache.S31 R2 = -L1_CACHE_BYTES;
39 R1 += L1_CACHE_BYTES;
/linux-4.1.27/arch/sh/kernel/cpu/sh4/
Dprobe.c42 boot_cpu_data.icache.linesz = L1_CACHE_BYTES; in cpu_probe()
51 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
248 boot_cpu_data.scache.linesz = L1_CACHE_BYTES; in cpu_probe()
/linux-4.1.27/arch/ia64/include/asm/sn/
Dbte.h227 (((len & (L1_CACHE_BYTES - 1)) || \
228 (src & (L1_CACHE_BYTES - 1)) || \
229 (dest & (L1_CACHE_BYTES - 1))) ? \
/linux-4.1.27/arch/mips/pmcs-msp71xx/
Dmsp_setup.c59 for (iptr = (void *)((unsigned int)start & ~(L1_CACHE_BYTES - 1)); in msp7120_reset()
60 iptr < end; iptr += L1_CACHE_BYTES) in msp7120_reset()
/linux-4.1.27/arch/arm/kernel/
Dvmlinux.lds.S227 PERCPU_SECTION(L1_CACHE_BYTES)
263 CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
264 READ_MOSTLY_DATA(L1_CACHE_BYTES)
Dbios32.c351 L1_CACHE_BYTES >> 2); in pcibios_fixup_bus()
/linux-4.1.27/arch/cris/include/arch-v10/arch/
Dcache.h5 #define L1_CACHE_BYTES 32 macro
/linux-4.1.27/arch/mips/include/asm/mach-tx49xx/
Dkmalloc.h4 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/openrisc/include/asm/
Dcache.h26 #define L1_CACHE_BYTES 16 macro
/linux-4.1.27/arch/score/include/asm/
Dcache.h5 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/include/asm-generic/
Dcache.h10 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/arch/m32r/include/asm/
Dcache.h6 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/arch/s390/include/asm/
Dcache.h12 #define L1_CACHE_BYTES 256 macro
/linux-4.1.27/arch/um/include/asm/
Dcache.h15 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/arch/sparc/include/asm/
Dcache.h13 #define L1_CACHE_BYTES 32 macro
/linux-4.1.27/arch/sh/include/asm/
Dcache.h15 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
Dprocessor_32.h200 #define PREFETCH_STRIDE L1_CACHE_BYTES
Dprocessor.h91 } __attribute__ ((aligned(L1_CACHE_BYTES)));
Dpci.h100 cacheline_size = L1_CACHE_BYTES; in pci_dma_burst_advice()
Dpage.h193 #define ARCH_DMA_MINALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/x86/include/asm/
Dcache.h8 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/arch/cris/include/arch-v32/arch/
Dcache.h7 #define L1_CACHE_BYTES 32 macro
/linux-4.1.27/arch/tile/include/asm/
Dcache.h22 #define L1_CACHE_BYTES (1 << L1_CACHE_SHIFT) macro
/linux-4.1.27/arch/sh/kernel/cpu/sh2/
Dprobe.c24 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
/linux-4.1.27/arch/hexagon/kernel/
Dvmlinux.lds.S44 PERCPU_SECTION(L1_CACHE_BYTES)
/linux-4.1.27/arch/x86/boot/compressed/
Dvmlinux.lds.S56 . = ALIGN(L1_CACHE_BYTES);
/linux-4.1.27/arch/openrisc/kernel/
Dvmlinux.lds.S89 PERCPU_SECTION(L1_CACHE_BYTES)
/linux-4.1.27/arch/sh/kernel/cpu/sh5/
Dprobe.c44 boot_cpu_data.icache.linesz = L1_CACHE_BYTES; in cpu_probe()
/linux-4.1.27/arch/avr32/kernel/
Dvmlinux.lds.S70 CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
/linux-4.1.27/arch/sh/kernel/cpu/sh2a/
Dprobe.c50 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
/linux-4.1.27/drivers/net/wireless/ath/ath5k/
Dpci.c71 *csz = L1_CACHE_BYTES >> 2; /* Use the default size */ in ath5k_pci_read_cachesize()
214 csz = L1_CACHE_BYTES >> 2; in ath5k_pci_probe()
Dahb.c32 *csz = L1_CACHE_BYTES >> 2; in ath5k_ahb_read_cachesize()
/linux-4.1.27/arch/parisc/kernel/
Dvmlinux.lds.S126 RW_DATA_SECTION(L1_CACHE_BYTES, PAGE_SIZE, PAGE_SIZE)
Dsyscall.S933 .align L1_CACHE_BYTES
/linux-4.1.27/drivers/net/wireless/rtl818x/rtl8187/
Drtl8187.h146 u8 dummy1[L1_CACHE_BYTES];
155 u8 dummy2[L1_CACHE_BYTES];
/linux-4.1.27/drivers/char/
Duv_mmtimer.c101 ret = ((uv_blade_processor_id() * L1_CACHE_BYTES) % in uv_mmtimer_ioctl()
/linux-4.1.27/arch/powerpc/mm/
Ddma-noncoherent.c330 if ((start & (L1_CACHE_BYTES - 1)) || (size & (L1_CACHE_BYTES - 1))) in __dma_sync()
/linux-4.1.27/arch/sh/kernel/cpu/sh3/
Dprobe.c55 boot_cpu_data.dcache.linesz = L1_CACHE_BYTES; in cpu_probe()
/linux-4.1.27/drivers/crypto/caam/
Dcaamrng.c50 #define RN_BUF_SIZE (0xffff / L1_CACHE_BYTES * \
51 L1_CACHE_BYTES)
/linux-4.1.27/arch/mn10300/kernel/
Dmn10300-serial-low.S40 .balign L1_CACHE_BYTES
114 .balign L1_CACHE_BYTES
/linux-4.1.27/include/asm-generic/bitops/
Datomic.h16 # define ATOMIC_HASH(a) (&(__atomic_hash[ (((unsigned long) a)/L1_CACHE_BYTES) & (ATOMIC_HASH_SIZE…
/linux-4.1.27/arch/arm64/kernel/
Dsetup.c259 if (L1_CACHE_BYTES < cls) in setup_processor()
261 L1_CACHE_BYTES, cls); in setup_processor()
/linux-4.1.27/arch/ia64/kernel/
Dminstate.h84 adds r17=2*L1_CACHE_BYTES,r1; /* really: biggest cache-line size */ \
87 lfetch.fault.excl.nt1 [r17],L1_CACHE_BYTES; \
/linux-4.1.27/arch/blackfin/mach-bf561/
Datomic.S41 r1 = -L1_CACHE_BYTES;
693 r1 = -L1_CACHE_BYTES;
917 r1 = -L1_CACHE_BYTES;
/linux-4.1.27/arch/powerpc/platforms/powermac/
Dsleep.S391 .balign L1_CACHE_BYTES
394 .balign L1_CACHE_BYTES, 0
Dpci.c1006 L1_CACHE_BYTES >> 2); in pmac_pci_enable_device_hook()
/linux-4.1.27/drivers/net/wireless/ath/ath9k/
Dahb.c56 *csz = L1_CACHE_BYTES >> 2; in ath_ahb_read_cachesize()
Dpci.c923 csz = L1_CACHE_BYTES / sizeof(u32); in ath_pci_probe()
Dhtc_drv_init.c514 *csz = L1_CACHE_BYTES >> 2; in ath_usb_read_cachesize()
/linux-4.1.27/lib/
Datomic64.c33 char pad[L1_CACHE_BYTES];
Dbtree.c49 #define NODESIZE MAX(L1_CACHE_BYTES, 128)
/linux-4.1.27/drivers/dma/
Dmic_x100_dma.c381 L1_CACHE_BYTES, DMA_BIDIRECTIONAL); in mic_dma_free_status_dest()
389 ch->status_dest = kzalloc(L1_CACHE_BYTES, GFP_KERNEL); in mic_dma_alloc_status_dest()
393 L1_CACHE_BYTES, DMA_BIDIRECTIONAL); in mic_dma_alloc_status_dest()
/linux-4.1.27/arch/x86/kernel/
Dvmlinux.lds.S138 CACHELINE_ALIGNED_DATA(L1_CACHE_BYTES)
Dhead_32.S767 .align L1_CACHE_BYTES
/linux-4.1.27/arch/parisc/lib/
Dmemcpy.c336 if (L1_CACHE_BYTES <= 32) in pa_memcpy_internal()
337 prefetch_src((char *)pds + L1_CACHE_BYTES); in pa_memcpy_internal()
/linux-4.1.27/arch/ia64/mm/
Ddiscontig.c123 pernodesize += node * L1_CACHE_BYTES; in compute_pernodesize()
279 pernode += node * L1_CACHE_BYTES; in fill_pernode()
/linux-4.1.27/drivers/net/ethernet/sfc/
Dtx.c31 #define EFX_PIOBUF_SIZE_DEF ALIGN(256, L1_CACHE_BYTES)
180 u8 buf[L1_CACHE_BYTES];
298 BUILD_BUG_ON(L1_CACHE_BYTES > in efx_enqueue_skb_pio()
301 ALIGN(skb->len, L1_CACHE_BYTES) >> 3); in efx_enqueue_skb_pio()
Dnet_driver.h88 #define EFX_RX_BUF_ALIGNMENT L1_CACHE_BYTES
Defx.c606 BUILD_BUG_ON(EFX_RX_USR_BUF_SIZE % L1_CACHE_BYTES); in efx_start_datapath()
/linux-4.1.27/arch/arm/mach-ks8695/
Dpci.c111 ks8695_local_writeconfig(PCI_CACHE_LINE_SIZE, (32 << 8) | (L1_CACHE_BYTES / sizeof(u32))); in ks8695_pci_setup()
/linux-4.1.27/drivers/ata/
Dpata_hpt366.c313 pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4)); in hpt36x_init_chipset()
Dpata_hpt3x2n.c538 pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4)); in hpt3x2n_init_one()
Dpata_hpt37x.c913 pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4)); in hpt37x_init_one()
/linux-4.1.27/drivers/infiniband/hw/ipath/
Dipath_init_chip.c415 ((2 * L1_CACHE_BYTES + in init_pioavailregs()
416 dd->ipath_pioavregs * sizeof(u64)) & ~L1_CACHE_BYTES)); in init_pioavailregs()
425 dd->ipath_freezelen = L1_CACHE_BYTES - sizeof(dd->ipath_statusp[0]); in init_pioavailregs()
/linux-4.1.27/net/core/
Ddev_addr_lists.c32 if (alloc_size < L1_CACHE_BYTES) in __hw_addr_create_ex()
33 alloc_size = L1_CACHE_BYTES; in __hw_addr_create_ex()
Dnet-sysfs.c689 RPS_MAP_SIZE(cpumask_weight(mask)), L1_CACHE_BYTES), in store_rps_map()
Dskbuff.c2245 skb_headlen(from) < L1_CACHE_BYTES || in skb_zerocopy_headlen()
Ddev.c2003 int maps_sz = max_t(unsigned int, XPS_DEV_MAPS_SIZE, L1_CACHE_BYTES); in netif_set_xps_queue()
/linux-4.1.27/drivers/net/ethernet/mellanox/mlx5/core/
Dalloc.c170 offset = db->index * L1_CACHE_BYTES; in mlx5_alloc_db_from_pgdir()
/linux-4.1.27/arch/x86/platform/uv/
Duv_time.c307 offset = (uv_blade_processor_id() * L1_CACHE_BYTES) % PAGE_SIZE; in uv_read_rtc()
/linux-4.1.27/arch/unicore32/mm/
Dinit.c144 bitmap = memblock_alloc_base(boot_pages << PAGE_SHIFT, L1_CACHE_BYTES, in uc32_bootmem_init()
/linux-4.1.27/include/linux/iio/
Diio.h585 #define IIO_ALIGN L1_CACHE_BYTES
/linux-4.1.27/arch/powerpc/platforms/52xx/
Dlite5200_sleep.S410 addi r3,r3,L1_CACHE_BYTES /* Next line, please */
/linux-4.1.27/drivers/net/ethernet/intel/fm10k/
Dfm10k_main.c276 unsigned int truesize = ALIGN(size, L1_CACHE_BYTES); in fm10k_add_rx_frag()
316 #if L1_CACHE_BYTES < 128 in fm10k_fetch_rx_buffer()
317 prefetch(page_addr + L1_CACHE_BYTES); in fm10k_fetch_rx_buffer()
/linux-4.1.27/drivers/parisc/
Dccio-dma.c770 if((size % L1_CACHE_BYTES) || ((unsigned long)addr % L1_CACHE_BYTES)) in ccio_map_single()
Ddino.c523 PCI_CACHE_LINE_SIZE, 2, 0xff00 | L1_CACHE_BYTES/4); in dino_card_fixup()
Dsba_iommu.c1716 &(sba_dev->ioc[i].res_map[L1_CACHE_BYTES]); in sba_common_init()
/linux-4.1.27/arch/powerpc/xmon/
Dxmon.c1585 nflush = (nflush + L1_CACHE_BYTES - 1) / L1_CACHE_BYTES; in cacheflush()
1591 for (; nflush > 0; --nflush, adrs += L1_CACHE_BYTES) in cacheflush()
1594 for (; nflush > 0; --nflush, adrs += L1_CACHE_BYTES) in cacheflush()
/linux-4.1.27/arch/arm/common/
DbL_switcher.c129 stack = PTR_ALIGN(stack, L1_CACHE_BYTES); in bL_switchpoint()
/linux-4.1.27/drivers/iommu/
Domap-iommu.c603 first += L1_CACHE_BYTES / sizeof(*first); in flush_iopgd_range()
613 first += L1_CACHE_BYTES / sizeof(*first); in flush_iopte_range()
/linux-4.1.27/include/linux/mlx5/
Ddriver.h545 MLX5_DB_PER_PAGE = PAGE_SIZE / L1_CACHE_BYTES,
/linux-4.1.27/arch/metag/lib/
Dusercopy.c18 #define RAPF_MIN_BUF_SIZE (3*L1_CACHE_BYTES)
/linux-4.1.27/drivers/infiniband/hw/qib/
Dqib_init.c343 ((2 * L1_CACHE_BYTES + in init_pioavailregs()
344 dd->pioavregs * sizeof(u64)) & ~L1_CACHE_BYTES)); in init_pioavailregs()
/linux-4.1.27/drivers/net/cris/
Deth_v10.c333 RxDescList[i].skb = dev_alloc_skb(MAX_MEDIA_DATA_SIZE + 2 * L1_CACHE_BYTES); in etrax_ethernet_init()
1291 struct sk_buff *new_skb = dev_alloc_skb(MAX_MEDIA_DATA_SIZE + 2 * L1_CACHE_BYTES); in e100_rx()
/linux-4.1.27/drivers/hsi/clients/
Dcmt_speech.c899 hi->slot_size, hi->buf_size, L1_CACHE_BYTES); in cs_hsi_data_enable()
904 data_start, sizeof(*hi->mmap_cfg), L1_CACHE_BYTES); in cs_hsi_data_enable()
/linux-4.1.27/drivers/net/wireless/ath/ath6kl/
Dinit.c224 reserved = roundup((2 * L1_CACHE_BYTES) + ATH6KL_DATA_OFFSET + in ath6kl_buf_alloc()
229 skb_reserve(skb, reserved - L1_CACHE_BYTES); in ath6kl_buf_alloc()
Dsdio.c357 size = 2 * L1_CACHE_BYTES + in ath6kl_sdio_alloc_prep_scat_req()
/linux-4.1.27/fs/
Dfile.c118 2 * nr / BITS_PER_BYTE, L1_CACHE_BYTES)); in alloc_fdtable()
Dnamespace.c83 unsigned long tmp = ((unsigned long)mnt / L1_CACHE_BYTES); in m_hash()
84 tmp += ((unsigned long)dentry / L1_CACHE_BYTES); in m_hash()
91 unsigned long tmp = ((unsigned long)dentry / L1_CACHE_BYTES); in mp_hash()
Dinode.c447 L1_CACHE_BYTES; in hash()
Ddcache.c110 hash += (unsigned long) parent / L1_CACHE_BYTES; in d_hash()
/linux-4.1.27/kernel/
Daudit_tree.c168 unsigned long n = (unsigned long)inode / L1_CACHE_BYTES; in chunk_hash()
Dfork.c294 #define ARCH_MIN_TASKALIGN L1_CACHE_BYTES in fork_init()
/linux-4.1.27/drivers/ide/
Dhpt366.c920 pci_write_config_byte(dev, PCI_CACHE_LINE_SIZE, (L1_CACHE_BYTES / 4)); in init_chipset_hpt366()
Dpmac.c1484 if (pmif->broken_dma && cur_addr & (L1_CACHE_BYTES - 1)) { in pmac_ide_build_dmatable()
/linux-4.1.27/drivers/net/ethernet/intel/ixgbevf/
Dixgbevf_main.c796 unsigned int truesize = ALIGN(size, L1_CACHE_BYTES); in ixgbevf_add_rx_frag()
861 #if L1_CACHE_BYTES < 128 in ixgbevf_fetch_rx_buffer()
862 prefetch(page_addr + L1_CACHE_BYTES); in ixgbevf_fetch_rx_buffer()
/linux-4.1.27/arch/powerpc/platforms/pseries/
Diommu.c62 inc = L1_CACHE_BYTES; /* invalidate a cacheline of TCEs at a time */ in tce_invalidate_pSeries_sw()
/linux-4.1.27/net/ipv4/
Dfib_frontend.c1157 size = max_t(size_t, size, L1_CACHE_BYTES); in ip_fib_net_init()
/linux-4.1.27/drivers/md/
Ddm-table.c27 #define NODE_SIZE L1_CACHE_BYTES
/linux-4.1.27/arch/powerpc/kvm/
Dbook3s_hv.c327 if (addr & (L1_CACHE_BYTES - 1)) in set_vpa()
375 if ((vpa & (L1_CACHE_BYTES - 1)) || !vpa) in do_h_register_vpa()
/linux-4.1.27/drivers/s390/kvm/
Dvirtio_ccw.c124 #define VIRTIO_IV_BITS (L1_CACHE_BYTES * 8)
/linux-4.1.27/drivers/scsi/csiostor/
Dcsio_wr.c1308 uint32_t clsz = L1_CACHE_BYTES; in csio_wr_fixup_host_params()
/linux-4.1.27/drivers/pcmcia/
Dyenta_socket.c1047 config_writeb(socket, PCI_CACHE_LINE_SIZE, L1_CACHE_BYTES / 4); in yenta_config_init()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb3/
Dsge.c2311 #if L1_CACHE_BYTES < 128 in process_responses()
2312 prefetch(addr + L1_CACHE_BYTES); in process_responses()
/linux-4.1.27/drivers/infiniband/hw/mlx5/
Dmain.c441 resp.cache_line_size = L1_CACHE_BYTES; in mlx5_ib_alloc_ucontext()
/linux-4.1.27/net/ipv6/
Dip6_fib.c1775 size = max_t(size_t, size, L1_CACHE_BYTES); in fib6_net_init()
/linux-4.1.27/drivers/net/
Dvirtio_net.c51 #define MERGEABLE_BUFFER_ALIGN max(L1_CACHE_BYTES, 256)
/linux-4.1.27/drivers/net/ethernet/neterion/vxge/
Dvxge-main.c375 prefetch((char *)dtr + L1_CACHE_BYTES); in vxge_rx_1b_compl()
400 prefetch((char *)skb + L1_CACHE_BYTES); in vxge_rx_1b_compl()
/linux-4.1.27/drivers/net/ethernet/intel/igb/
Digb_main.c6658 unsigned int truesize = ALIGN(size, L1_CACHE_BYTES); in igb_add_rx_frag()
6704 #if L1_CACHE_BYTES < 128 in igb_fetch_rx_buffer()
6705 prefetch(page_addr + L1_CACHE_BYTES); in igb_fetch_rx_buffer()
/linux-4.1.27/drivers/net/ethernet/intel/ixgbe/
Dixgbe_main.c1860 unsigned int truesize = ALIGN(size, L1_CACHE_BYTES); in ixgbe_add_rx_frag()
1928 #if L1_CACHE_BYTES < 128 in ixgbe_fetch_rx_buffer()
1929 prefetch(page_addr + L1_CACHE_BYTES); in ixgbe_fetch_rx_buffer()
/linux-4.1.27/drivers/pci/
Dpci.c92 u8 pci_dfl_cache_line_size = L1_CACHE_BYTES >> 2;
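The recurring "L1_CACHE_BYTES >> 2" in the PCI-related hits converts a byte count into the 32-bit-word units used by the PCI Cache Line Size register. A one-line illustration, assuming a 64-byte line for the example:

/* PCI Cache Line Size is programmed in dwords, not bytes. */
#define L1_CACHE_BYTES	64	/* example value only */

static unsigned char pci_cache_line_size_dwords(void)
{
	return L1_CACHE_BYTES >> 2;	/* 64 bytes -> 16 dwords */
}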
/linux-4.1.27/mm/
Dshmem.c2997 L1_CACHE_BYTES), GFP_KERNEL); in shmem_fill_super()
/linux-4.1.27/drivers/net/ethernet/chelsio/cxgb4/
Dcxgb4_main.c3252 t4_fixup_host_params(adapter, PAGE_SIZE, L1_CACHE_BYTES); in adap_init0_tweaks()