cacheline 355 arch/sparc/kernel/prom_irqtrans.c static unsigned char cacheline[64]
cacheline 366 arch/sparc/kernel/prom_irqtrans.c "i" (FPRS_FEF), "r" (&cacheline[0]),
cacheline 302 drivers/gpu/drm/i915/gt/intel_engine.h GEM_BUG_ON(cacheline(tail) == cacheline(ring->head) &&
cacheline 54 drivers/gpu/drm/i915/gt/intel_timeline.c hwsp_alloc(struct intel_timeline *timeline, unsigned int *cacheline)
cacheline 92 drivers/gpu/drm/i915/gt/intel_timeline.c *cacheline = __ffs64(hwsp->free_bitmap);
cacheline 93 drivers/gpu/drm/i915/gt/intel_timeline.c hwsp->free_bitmap &= ~BIT_ULL(*cacheline);
cacheline 103 drivers/gpu/drm/i915/gt/intel_timeline.c static void __idle_hwsp_free(struct intel_timeline_hwsp *hwsp, int cacheline)
cacheline 114 drivers/gpu/drm/i915/gt/intel_timeline.c GEM_BUG_ON(cacheline >= BITS_PER_TYPE(hwsp->free_bitmap));
cacheline 115 drivers/gpu/drm/i915/gt/intel_timeline.c hwsp->free_bitmap |= BIT_ULL(cacheline);
cacheline 159 drivers/gpu/drm/i915/gt/intel_timeline.c cacheline_alloc(struct intel_timeline_hwsp *hwsp, unsigned int cacheline)
cacheline 164 drivers/gpu/drm/i915/gt/intel_timeline.c GEM_BUG_ON(cacheline >= BIT(CACHELINE_BITS));
cacheline 178 drivers/gpu/drm/i915/gt/intel_timeline.c cl->vaddr = page_pack_bits(vaddr, cacheline);
cacheline 223 drivers/gpu/drm/i915/gt/intel_timeline.c unsigned int cacheline;
cacheline 225 drivers/gpu/drm/i915/gt/intel_timeline.c hwsp = hwsp_alloc(timeline, &cacheline);
cacheline 229 drivers/gpu/drm/i915/gt/intel_timeline.c cl = cacheline_alloc(hwsp->private, cacheline);
cacheline 231 drivers/gpu/drm/i915/gt/intel_timeline.c __idle_hwsp_free(hwsp->private, cacheline);
cacheline 236 drivers/gpu/drm/i915/gt/intel_timeline.c timeline->hwsp_offset = cacheline * CACHELINE_BYTES;
cacheline 396 drivers/gpu/drm/i915/gt/intel_timeline.c unsigned int cacheline;
cacheline 420 drivers/gpu/drm/i915/gt/intel_timeline.c vma = hwsp_alloc(tl, &cacheline);
cacheline 428 drivers/gpu/drm/i915/gt/intel_timeline.c __idle_hwsp_free(vma->private, cacheline);
cacheline 432 drivers/gpu/drm/i915/gt/intel_timeline.c cl = cacheline_alloc(vma->private, cacheline);
cacheline 435 drivers/gpu/drm/i915/gt/intel_timeline.c __idle_hwsp_free(vma->private, cacheline);
cacheline 458 drivers/gpu/drm/i915/gt/intel_timeline.c tl->hwsp_offset = cacheline * CACHELINE_BYTES;
cacheline 67 drivers/gpu/drm/i915/gt/selftest_timeline.c unsigned long cacheline;
cacheline 74 drivers/gpu/drm/i915/gt/selftest_timeline.c cacheline = hwsp_cacheline(tl);
cacheline 75 drivers/gpu/drm/i915/gt/selftest_timeline.c err = radix_tree_insert(&state->cachelines, cacheline, tl);
cacheline 79 drivers/gpu/drm/i915/gt/selftest_timeline.c cacheline);
cacheline 140 drivers/lightnvm/pblk-rb.c entry->cacheline = pblk_cacheline_to_addr(init_entry++);
cacheline 146 drivers/lightnvm/pblk-rb.c entry->cacheline = pblk_cacheline_to_addr(init_entry++);
cacheline 260 drivers/lightnvm/pblk-rb.c entry->cacheline);
cacheline 353 drivers/lightnvm/pblk-rb.c pblk_update_map_cache(pblk, w_ctx.lba, entry->cacheline);
cacheline 377 drivers/lightnvm/pblk-rb.c if (!pblk_update_map_gc(pblk, w_ctx.lba, entry->cacheline, line, paddr))
cacheline 166 drivers/lightnvm/pblk-write.c if (!pblk_ppa_comp(ppa_l2p, entry->cacheline))
cacheline 149 drivers/lightnvm/pblk.h struct ppa_addr cacheline; /* Cacheline for this entry */
cacheline 527 drivers/md/bcache/bset.c unsigned int cacheline,
cacheline 530 drivers/md/bcache/bset.c return ((void *) t->data) + cacheline * BSET_CACHELINE + offset * 8;
cacheline 539 drivers/md/bcache/bset.c unsigned int cacheline,
cacheline 542 drivers/md/bcache/bset.c return (u64 *) k - (u64 *) cacheline_to_bkey(t, cacheline, 0);
cacheline 559 drivers/md/bcache/bset.c static struct bkey *table_to_bkey(struct bset_tree *t, unsigned int cacheline)
cacheline 561 drivers/md/bcache/bset.c return cacheline_to_bkey(t, cacheline, t->prev[cacheline]);
cacheline 696 drivers/md/bcache/bset.c unsigned int j, cacheline = 1;
cacheline 717 drivers/md/bcache/bset.c while (bkey_to_cacheline(t, k) < cacheline)
cacheline 721 drivers/md/bcache/bset.c t->tree[j].m = bkey_to_cacheline_offset(t, cacheline++, k);
cacheline 153 drivers/soc/qcom/smem.c __le32 cacheline;
cacheline 269 drivers/soc/qcom/smem.c size_t cacheline[SMEM_HOST_COUNT];
cacheline 287 drivers/soc/qcom/smem.c size_t cacheline)
cacheline 292 drivers/soc/qcom/smem.c return p + le32_to_cpu(phdr->size) - ALIGN(sizeof(*e), cacheline);
cacheline 321 drivers/soc/qcom/smem.c cached_entry_next(struct smem_private_entry *e, size_t cacheline)
cacheline 325 drivers/soc/qcom/smem.c return p - le32_to_cpu(e->size) - ALIGN(sizeof(*e), cacheline);
cacheline 513 drivers/soc/qcom/smem.c size_t cacheline,
cacheline 539 drivers/soc/qcom/smem.c e = phdr_to_first_cached_entry(phdr, cacheline);
cacheline 554 drivers/soc/qcom/smem.c e = cached_entry_next(e, cacheline);
cacheline 597 drivers/soc/qcom/smem.c cacheln = __smem->cacheline[host];
cacheline 811 drivers/soc/qcom/smem.c smem->global_cacheline = le32_to_cpu(entry->cacheline);
cacheline 861 drivers/soc/qcom/smem.c smem->cacheline[remote_host] = le32_to_cpu(entry->cacheline);
cacheline 899 include/asm-generic/vmlinux.lds.h #define PERCPU_INPUT(cacheline) \
cacheline 904 include/asm-generic/vmlinux.lds.h . = ALIGN(cacheline); \
cacheline 906 include/asm-generic/vmlinux.lds.h . = ALIGN(cacheline); \
cacheline 936 include/asm-generic/vmlinux.lds.h #define PERCPU_VADDR(cacheline, vaddr, phdr) \
cacheline 939 include/asm-generic/vmlinux.lds.h PERCPU_INPUT(cacheline) \
cacheline 955 include/asm-generic/vmlinux.lds.h #define PERCPU_SECTION(cacheline) \
cacheline 959 include/asm-generic/vmlinux.lds.h PERCPU_INPUT(cacheline) \
cacheline 981 include/asm-generic/vmlinux.lds.h #define RW_DATA_SECTION(cacheline, pagealigned, inittask) \
cacheline 987 include/asm-generic/vmlinux.lds.h CACHELINE_ALIGNED_DATA(cacheline) \
cacheline 988 include/asm-generic/vmlinux.lds.h READ_MOSTLY_DATA(cacheline) \
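Several of the hits above share one recurring idiom: a small integer cacheline index is turned into a byte offset by multiplying with the cacheline size, as in intel_timeline.c line 236 ("cacheline * CACHELINE_BYTES") and bset.c line 530 ("cacheline * BSET_CACHELINE + offset * 8"). The following is a minimal userspace sketch of that indexing arithmetic only; it is not kernel code, the 64-byte CACHELINE_BYTES value is an assumption (the real value is per-architecture), and cacheline_to_ptr() is a hypothetical helper invented for illustration.

    #include <stdio.h>

    /* Assumption: a 64-byte cacheline; the kernel's CACHELINE_BYTES is per-arch. */
    #define CACHELINE_BYTES 64

    /*
     * Hypothetical helper mirroring the idiom in the listing above:
     * convert a cacheline index within a buffer, plus a byte offset
     * inside that line, into a pointer.
     */
    static void *cacheline_to_ptr(void *base, unsigned int cacheline,
                                  unsigned int offset_in_line)
    {
            return (char *)base + cacheline * CACHELINE_BYTES + offset_in_line;
    }

    int main(void)
    {
            static unsigned char page[4096];

            /* Cacheline 3, byte 8 within that line: offset 3*64 + 8 = 200. */
            unsigned char *p = cacheline_to_ptr(page, 3, 8);

            printf("offset = %td\n", p - page);
            return 0;
    }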