batch              25 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h extern void __flush_tlb_pending(struct ppc64_tlb_batch *batch);
batch              31 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	struct ppc64_tlb_batch *batch;
batch              35 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	batch = this_cpu_ptr(&ppc64_tlb_batch);
batch              36 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	batch->active = 1;
batch              41 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	struct ppc64_tlb_batch *batch;
batch              45 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	batch = this_cpu_ptr(&ppc64_tlb_batch);
batch              47 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	if (batch->index)
batch              48 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 		__flush_tlb_pending(batch);
batch              49 arch/powerpc/include/asm/book3s/64/tlbflush-hash.h 	batch->active = 0;
batch            1128 arch/powerpc/kernel/process.c 	struct ppc64_tlb_batch *batch;
batch            1137 arch/powerpc/kernel/process.c 	batch = this_cpu_ptr(&ppc64_tlb_batch);
batch            1138 arch/powerpc/kernel/process.c 	if (batch->active) {
batch            1140 arch/powerpc/kernel/process.c 		if (batch->index)
batch            1141 arch/powerpc/kernel/process.c 			__flush_tlb_pending(batch);
batch            1142 arch/powerpc/kernel/process.c 		batch->active = 0;
batch            1193 arch/powerpc/kernel/process.c 		batch = this_cpu_ptr(&ppc64_tlb_batch);
batch            1194 arch/powerpc/kernel/process.c 		batch->active = 1;
batch             811 arch/powerpc/mm/book3s64/hash_native.c 	struct ppc64_tlb_batch *batch = this_cpu_ptr(&ppc64_tlb_batch);
batch             812 arch/powerpc/mm/book3s64/hash_native.c 	unsigned long psize = batch->psize;
batch             813 arch/powerpc/mm/book3s64/hash_native.c 	int ssize = batch->ssize;
batch             823 arch/powerpc/mm/book3s64/hash_native.c 		vpn = batch->vpn[i];
batch             824 arch/powerpc/mm/book3s64/hash_native.c 		pte = batch->pte[i];
batch             854 arch/powerpc/mm/book3s64/hash_native.c 			vpn = batch->vpn[i];
batch             855 arch/powerpc/mm/book3s64/hash_native.c 			pte = batch->pte[i];
batch             871 arch/powerpc/mm/book3s64/hash_native.c 			vpn = batch->vpn[i];
batch             872 arch/powerpc/mm/book3s64/hash_native.c 			pte = batch->pte[i];
batch              45 arch/powerpc/mm/book3s64/hash_tlb.c 	struct ppc64_tlb_batch *batch = &get_cpu_var(ppc64_tlb_batch);
batch              52 arch/powerpc/mm/book3s64/hash_tlb.c 	i = batch->index;
batch             104 arch/powerpc/mm/book3s64/hash_tlb.c 	if (!batch->active) {
batch             120 arch/powerpc/mm/book3s64/hash_tlb.c 	if (i != 0 && (mm != batch->mm || batch->psize != psize ||
batch             121 arch/powerpc/mm/book3s64/hash_tlb.c 		       batch->ssize != ssize)) {
batch             122 arch/powerpc/mm/book3s64/hash_tlb.c 		__flush_tlb_pending(batch);
batch             126 arch/powerpc/mm/book3s64/hash_tlb.c 		batch->mm = mm;
batch             127 arch/powerpc/mm/book3s64/hash_tlb.c 		batch->psize = psize;
batch             128 arch/powerpc/mm/book3s64/hash_tlb.c 		batch->ssize = ssize;
batch             130 arch/powerpc/mm/book3s64/hash_tlb.c 	batch->pte[i] = rpte;
batch             131 arch/powerpc/mm/book3s64/hash_tlb.c 	batch->vpn[i] = vpn;
batch             132 arch/powerpc/mm/book3s64/hash_tlb.c 	batch->index = ++i;
batch             134 arch/powerpc/mm/book3s64/hash_tlb.c 		__flush_tlb_pending(batch);
batch             145 arch/powerpc/mm/book3s64/hash_tlb.c void __flush_tlb_pending(struct ppc64_tlb_batch *batch)
batch             149 arch/powerpc/mm/book3s64/hash_tlb.c 	i = batch->index;
batch             150 arch/powerpc/mm/book3s64/hash_tlb.c 	local = mm_is_thread_local(batch->mm);
batch             152 arch/powerpc/mm/book3s64/hash_tlb.c 		flush_hash_page(batch->vpn[0], batch->pte[0],
batch             153 arch/powerpc/mm/book3s64/hash_tlb.c 				batch->psize, batch->ssize, local);
batch             156 arch/powerpc/mm/book3s64/hash_tlb.c 	batch->index = 0;
batch            1807 arch/powerpc/mm/book3s64/hash_utils.c 		struct ppc64_tlb_batch *batch =
batch            1811 arch/powerpc/mm/book3s64/hash_utils.c 			flush_hash_page(batch->vpn[i], batch->pte[i],
batch            1812 arch/powerpc/mm/book3s64/hash_utils.c 					batch->psize, batch->ssize, local);
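
The powerpc hits above implement a per-CPU ppc64_tlb_batch: arch_enter_lazy_mmu_mode() marks the batch active, hpte_need_flush() queues (vpn, pte) pairs into it, and arch_leave_lazy_mmu_mode() drains it through __flush_tlb_pending(). A hedged sketch of that calling pattern follows; the unmap loop and its variables are illustrative, only the arch hooks and batch fields are taken from the files above.

	/* Sketch only: assumes a caller already holding the page table lock. */
	arch_enter_lazy_mmu_mode();                 /* batch->active = 1 */
	for (addr = start; addr < end; addr += PAGE_SIZE, ptep++)
		ptep_get_and_clear(mm, addr, ptep); /* hash PTE clears funnel into
						     * hpte_need_flush(), which fills
						     * batch->vpn[] / batch->pte[] */
	arch_leave_lazy_mmu_mode();                 /* __flush_tlb_pending() runs if
						     * batch->index != 0 */
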
batch             263 arch/powerpc/mm/hugetlbpage.c 	struct hugepd_freelist *batch =
batch             267 arch/powerpc/mm/hugetlbpage.c 	for (i = 0; i < batch->index; i++)
batch             268 arch/powerpc/mm/hugetlbpage.c 		kmem_cache_free(PGT_CACHE(PTE_T_ORDER), batch->ptes[i]);
batch             270 arch/powerpc/mm/hugetlbpage.c 	free_page((unsigned long)batch);
batch            1284 arch/powerpc/platforms/pseries/lpar.c static void do_block_remove(unsigned long number, struct ppc64_tlb_batch *batch,
batch            1293 arch/powerpc/platforms/pseries/lpar.c 	psize = batch->psize;
batch            1294 arch/powerpc/platforms/pseries/lpar.c 	ssize = batch->ssize;
batch            1297 arch/powerpc/platforms/pseries/lpar.c 		vpn = batch->vpn[i];
batch            1298 arch/powerpc/platforms/pseries/lpar.c 		pte = batch->pte[i];
batch            1485 arch/powerpc/platforms/pseries/lpar.c 	struct ppc64_tlb_batch *batch = this_cpu_ptr(&ppc64_tlb_batch);
batch            1495 arch/powerpc/platforms/pseries/lpar.c 	if (is_supported_hlbkrm(batch->psize, batch->psize)) {
batch            1496 arch/powerpc/platforms/pseries/lpar.c 		do_block_remove(number, batch, param);
batch            1500 arch/powerpc/platforms/pseries/lpar.c 	psize = batch->psize;
batch            1501 arch/powerpc/platforms/pseries/lpar.c 	ssize = batch->ssize;
batch            1504 arch/powerpc/platforms/pseries/lpar.c 		vpn = batch->vpn[i];
batch            1505 arch/powerpc/platforms/pseries/lpar.c 		pte = batch->pte[i];
batch             602 arch/x86/include/asm/tlbflush.h static inline void arch_tlbbatch_add_mm(struct arch_tlbflush_unmap_batch *batch,
batch             606 arch/x86/include/asm/tlbflush.h 	cpumask_or(&batch->cpumask, &batch->cpumask, mm_cpumask(mm));
batch             609 arch/x86/include/asm/tlbflush.h extern void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch);
batch            5832 arch/x86/kvm/mmu.c 	int nr_zapped, batch = 0;
batch            5859 arch/x86/kvm/mmu.c 		if (batch >= BATCH_ZAP_PAGES &&
batch            5861 arch/x86/kvm/mmu.c 			batch = 0;
batch            5867 arch/x86/kvm/mmu.c 			batch += nr_zapped;
batch             858 arch/x86/mm/tlb.c void arch_tlbbatch_flush(struct arch_tlbflush_unmap_batch *batch)
batch             862 arch/x86/mm/tlb.c 	if (cpumask_test_cpu(cpu, &batch->cpumask)) {
batch             869 arch/x86/mm/tlb.c 	if (cpumask_any_but(&batch->cpumask, cpu) < nr_cpu_ids)
batch             870 arch/x86/mm/tlb.c 		flush_tlb_others(&batch->cpumask, &full_flush_tlb_info);
batch             872 arch/x86/mm/tlb.c 	cpumask_clear(&batch->cpumask);
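
On x86 the batch is just a cpumask: arch_tlbbatch_add_mm() ORs in the mm's CPUs and arch_tlbbatch_flush() issues one combined flush for everything accumulated. The real caller is the deferred-flush path in mm/rmap.c; the loop shape below is only an illustration of that use.

	struct arch_tlbflush_unmap_batch *ubc = &current->tlb_ubc.arch;

	/* while unmapping PTEs belonging to several address spaces ... */
	arch_tlbbatch_add_mm(ubc, mm);      /* accumulate mm_cpumask(mm) */
	/* ... further arch_tlbbatch_add_mm() calls for other mms ... */
	arch_tlbbatch_flush(ubc);           /* one local flush plus a single
					     * flush_tlb_others() for the whole
					     * accumulated cpumask */
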
batch            2739 arch/x86/xen/mmu_pv.c 		int batch = min(REMAP_BATCH_SIZE, nr);
batch            2740 arch/x86/xen/mmu_pv.c 		int batch_left = batch;
batch            2742 arch/x86/xen/mmu_pv.c 		range = (unsigned long)batch << PAGE_SHIFT;
batch            2780 arch/x86/xen/mmu_pv.c 		nr -= batch;
batch            2783 arch/x86/xen/mmu_pv.c 			err_ptr += batch;
batch              25 block/blk-stat.c 	stat->batch = 0;
batch              37 block/blk-stat.c 	dst->mean = div_u64(src->batch + dst->mean * dst->nr_samples,
batch              47 block/blk-stat.c 	stat->batch += value;
batch             790 drivers/block/xen-blkback/blkback.c 		unsigned int batch = min(num, BLKIF_MAX_SEGMENTS_PER_REQUEST);
batch             792 drivers/block/xen-blkback/blkback.c 		invcount = xen_blkbk_unmap_prepare(ring, pages, batch,
batch             799 drivers/block/xen-blkback/blkback.c 		pages += batch;
batch             800 drivers/block/xen-blkback/blkback.c 		num -= batch;
batch            2375 drivers/char/random.c 	struct batched_entropy *batch;
batch            2380 drivers/char/random.c 	batch = raw_cpu_ptr(&batched_entropy_u64);
batch            2381 drivers/char/random.c 	spin_lock_irqsave(&batch->batch_lock, flags);
batch            2382 drivers/char/random.c 	if (batch->position % ARRAY_SIZE(batch->entropy_u64) == 0) {
batch            2383 drivers/char/random.c 		extract_crng((u8 *)batch->entropy_u64);
batch            2384 drivers/char/random.c 		batch->position = 0;
batch            2386 drivers/char/random.c 	ret = batch->entropy_u64[batch->position++];
batch            2387 drivers/char/random.c 	spin_unlock_irqrestore(&batch->batch_lock, flags);
batch            2399 drivers/char/random.c 	struct batched_entropy *batch;
batch            2404 drivers/char/random.c 	batch = raw_cpu_ptr(&batched_entropy_u32);
batch            2405 drivers/char/random.c 	spin_lock_irqsave(&batch->batch_lock, flags);
batch            2406 drivers/char/random.c 	if (batch->position % ARRAY_SIZE(batch->entropy_u32) == 0) {
batch            2407 drivers/char/random.c 		extract_crng((u8 *)batch->entropy_u32);
batch            2408 drivers/char/random.c 		batch->position = 0;
batch            2410 drivers/char/random.c 	ret = batch->entropy_u32[batch->position++];
batch            2411 drivers/char/random.c 	spin_unlock_irqrestore(&batch->batch_lock, flags);
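
The random.c hits are the consumer-invisible side of get_random_u64()/get_random_u32(): each call hands out one word from a per-CPU batch and only refills via extract_crng() when the batch is exhausted. Callers just use the ordinary API:

	#include <linux/random.h>

	u64 id  = get_random_u64();   /* served from batch->entropy_u64[] */
	u32 tag = get_random_u32();   /* served from batch->entropy_u32[] */
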
batch             162 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	struct i915_vma *batch;
batch             182 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	batch = intel_emit_vma_fill_blt(w->ce, vma, w->value);
batch             183 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	if (IS_ERR(batch)) {
batch             184 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 		err = PTR_ERR(batch);
batch             199 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	err = intel_emit_vma_mark_active(batch, rq);
batch             219 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 					   batch->node.start, batch->node.size,
batch             229 drivers/gpu/drm/i915/gem/i915_gem_client_blt.c 	intel_emit_vma_release(w->ce, batch);
batch             229 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct i915_vma *batch; /** identity of the batch obj/vma */
batch             542 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		eb->batch = vma;
batch             745 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	unsigned int i, batch;
batch             754 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	batch = eb_batch_index(eb);
batch             806 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		err = eb_add_vma(eb, i, batch, vma);
batch             934 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct drm_i915_gem_object *obj = cache->rq->batch->obj;
batch            1149 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	struct i915_vma *batch;
batch            1166 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	batch = i915_vma_instance(pool->obj, vma->vm, NULL);
batch            1167 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (IS_ERR(batch)) {
batch            1168 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		err = PTR_ERR(batch);
batch            1172 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = i915_vma_pin(batch, 0, 0, PIN_USER | PIN_NONBLOCK);
batch            1191 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 					batch->node.start, PAGE_SIZE,
batch            1196 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	i915_vma_lock(batch);
batch            1197 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	err = i915_request_await_object(rq, batch->obj, false);
batch            1199 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		err = i915_vma_move_to_active(batch, rq, 0);
batch            1200 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	i915_vma_unlock(batch);
batch            1204 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	rq->batch = batch;
batch            1205 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	i915_vma_unpin(batch);
batch            1219 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	i915_vma_unpin(batch);
batch            1274 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		u32 *batch;
batch            1284 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		batch = reloc_gpu(eb, vma, len);
batch            1285 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		if (IS_ERR(batch))
batch            1291 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch            1292 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = lower_32_bits(addr);
batch            1293 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = upper_32_bits(addr);
batch            1294 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = lower_32_bits(target_offset);
batch            1298 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch            1299 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = lower_32_bits(addr);
batch            1300 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = upper_32_bits(addr);
batch            1301 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = upper_32_bits(target_offset);
batch            1303 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = (MI_STORE_DWORD_IMM_GEN4 | (1 << 21)) + 1;
batch            1304 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = lower_32_bits(addr);
batch            1305 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = upper_32_bits(addr);
batch            1306 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = lower_32_bits(target_offset);
batch            1307 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				*batch++ = upper_32_bits(target_offset);
batch            1310 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch            1311 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = 0;
batch            1312 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = addr;
batch            1313 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = target_offset;
batch            1315 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;
batch            1316 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = 0;
batch            1317 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = addr;
batch            1318 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = target_offset;
batch            1320 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = MI_STORE_DWORD_IMM | MI_MEM_VIRTUAL;
batch            1321 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = addr;
batch            1322 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 			*batch++ = target_offset;
batch            1738 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	GEM_BUG_ON(!eb->batch);
batch            2006 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	batch_start = gen8_canonical_addr(eb->batch->node.start) +
batch            2013 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 				      eb->batch->obj,
batch            2044 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	eb->batch = vma;
batch            2098 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 					eb->batch->node.start +
batch            2588 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (unlikely(*eb.batch->exec_flags & EXEC_OBJECT_WRITE)) {
batch            2593 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (eb.batch_start_offset > eb.batch->size ||
batch            2594 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	    eb.batch_len > eb.batch->size - eb.batch_start_offset) {
batch            2601 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		eb.batch_len = eb.batch->size - eb.batch_start_offset;
batch            2630 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		vma = i915_gem_object_ggtt_pin(eb.batch->obj, NULL, 0, 0, 0);
batch            2636 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		eb.batch = vma;
batch            2683 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	eb.request->batch = eb.batch;
batch            2684 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (eb.batch->private)
batch            2685 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		intel_engine_pool_mark_active(eb.batch->private, eb.request);
batch            2709 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		i915_vma_unpin(eb.batch);
batch            2710 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 	if (eb.batch->private)
batch            2711 drivers/gpu/drm/i915/gem/i915_gem_execbuffer.c 		intel_engine_pool_put(eb.batch->private);
batch              21 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct i915_vma *batch;
batch              84 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = i915_vma_instance(pool->obj, ce->vm, NULL);
batch              85 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	if (IS_ERR(batch)) {
batch              86 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 		err = PTR_ERR(batch);
batch              90 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	err = i915_vma_pin(batch, 0, 0, PIN_USER);
batch              94 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch->private = pool;
batch              95 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	return batch;
batch             131 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct i915_vma *batch;
batch             149 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = intel_emit_vma_fill_blt(ce, vma, value);
batch             150 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	if (IS_ERR(batch)) {
batch             151 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 		err = PTR_ERR(batch);
batch             161 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	err = intel_emit_vma_mark_active(batch, rq);
batch             184 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 					batch->node.start, batch->node.size,
batch             192 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_emit_vma_release(ce, batch);
batch             205 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct i915_vma *batch;
batch             283 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = i915_vma_instance(pool->obj, ce->vm, NULL);
batch             284 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	if (IS_ERR(batch)) {
batch             285 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 		err = PTR_ERR(batch);
batch             289 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	err = i915_vma_pin(batch, 0, 0, PIN_USER);
batch             293 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch->private = pool;
batch             294 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	return batch;
batch             319 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	struct i915_vma *vma[2], *batch;
batch             340 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	batch = intel_emit_vma_copy_blt(ce, vma[0], vma[1]);
batch             341 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	if (IS_ERR(batch)) {
batch             342 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 		err = PTR_ERR(batch);
batch             352 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	err = intel_emit_vma_mark_active(batch, rq);
batch             381 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 					batch->node.start, batch->node.size,
batch             391 drivers/gpu/drm/i915/gem/i915_gem_object_blt.c 	intel_emit_vma_release(ce, batch);
batch             335 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 		i915->mm.shrinker.batch =
batch             336 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 			max((i915->mm.shrinker.batch + avg) >> 1,
batch             467 drivers/gpu/drm/i915/gem/i915_gem_shrinker.c 	i915->mm.shrinker.batch = 4096;
batch             630 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	struct i915_vma *batch;
batch             650 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	batch = rpcs_query_batch(vma);
batch             651 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	if (IS_ERR(batch)) {
batch             652 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		err = PTR_ERR(batch);
batch             663 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 					batch->node.start, batch->node.size,
batch             668 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_lock(batch);
batch             669 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	err = i915_request_await_object(rq, batch->obj, false);
batch             671 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 		err = i915_vma_move_to_active(batch, rq, 0);
batch             672 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_unlock(batch);
batch             684 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_unpin(batch);
batch             685 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_close(batch);
batch             686 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_put(batch);
batch             701 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_unpin(batch);
batch             702 drivers/gpu/drm/i915/gem/selftests/i915_gem_context.c 	i915_vma_put(batch);
batch             113 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	struct i915_vma *batch;
batch             121 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	batch = igt_emit_store_dw(vma, offset, count, val);
batch             122 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	if (IS_ERR(batch))
batch             123 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 		return PTR_ERR(batch);
batch             136 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 				    batch->node.start, batch->node.size,
batch             141 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_lock(batch);
batch             142 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	err = i915_request_await_object(rq, batch->obj, false);
batch             144 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 		err = i915_vma_move_to_active(batch, rq, 0);
batch             145 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_unlock(batch);
batch             159 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_unpin(batch);
batch             160 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_close(batch);
batch             161 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_put(batch);
batch             170 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_unpin(batch);
batch             171 drivers/gpu/drm/i915/gem/selftests/igt_gem_utils.c 	i915_vma_put(batch);
batch             377 drivers/gpu/drm/i915/gt/intel_engine.h static inline u32 *gen8_emit_pipe_control(u32 *batch, u32 flags, u32 offset)
batch             379 drivers/gpu/drm/i915/gt/intel_engine.h 	memset(batch, 0, 6 * sizeof(u32));
batch             381 drivers/gpu/drm/i915/gt/intel_engine.h 	batch[0] = GFX_OP_PIPE_CONTROL(6);
batch             382 drivers/gpu/drm/i915/gt/intel_engine.h 	batch[1] = flags;
batch             383 drivers/gpu/drm/i915/gt/intel_engine.h 	batch[2] = offset;
batch             385 drivers/gpu/drm/i915/gt/intel_engine.h 	return batch + 6;
batch            1329 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		   rq->batch ? upper_32_bits(rq->batch->node.start) : ~0u,
batch            1330 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		   rq->batch ? lower_32_bits(rq->batch->node.start) : ~0u);
batch            2006 drivers/gpu/drm/i915/gt/intel_lrc.c gen8_emit_flush_coherentl3_wa(struct intel_engine_cs *engine, u32 *batch)
batch            2009 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_STORE_REGISTER_MEM_GEN8 | MI_SRM_LRM_GLOBAL_GTT;
batch            2010 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = i915_mmio_reg_offset(GEN8_L3SQCREG4);
batch            2011 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = intel_gt_scratch_offset(engine->gt,
batch            2013 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = 0;
batch            2015 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_LOAD_REGISTER_IMM(1);
batch            2016 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = i915_mmio_reg_offset(GEN8_L3SQCREG4);
batch            2017 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = 0x40400000 | GEN8_LQSC_FLUSH_COHERENT_LINES;
batch            2019 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = gen8_emit_pipe_control(batch,
batch            2024 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_LOAD_REGISTER_MEM_GEN8 | MI_SRM_LRM_GLOBAL_GTT;
batch            2025 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = i915_mmio_reg_offset(GEN8_L3SQCREG4);
batch            2026 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = intel_gt_scratch_offset(engine->gt,
batch            2028 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = 0;
batch            2030 drivers/gpu/drm/i915/gt/intel_lrc.c 	return batch;
batch            2054 drivers/gpu/drm/i915/gt/intel_lrc.c static u32 *gen8_init_indirectctx_bb(struct intel_engine_cs *engine, u32 *batch)
batch            2057 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
batch            2061 drivers/gpu/drm/i915/gt/intel_lrc.c 		batch = gen8_emit_flush_coherentl3_wa(engine, batch);
batch            2065 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = gen8_emit_pipe_control(batch,
batch            2072 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;
batch            2075 drivers/gpu/drm/i915/gt/intel_lrc.c 	while ((unsigned long)batch % CACHELINE_BYTES)
batch            2076 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = MI_NOOP;
batch            2084 drivers/gpu/drm/i915/gt/intel_lrc.c 	return batch;
batch            2092 drivers/gpu/drm/i915/gt/intel_lrc.c static u32 *emit_lri(u32 *batch, const struct lri *lri, unsigned int count)
batch            2096 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_LOAD_REGISTER_IMM(count);
batch            2098 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = i915_mmio_reg_offset(lri->reg);
batch            2099 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = lri->value;
batch            2101 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_NOOP;
batch            2103 drivers/gpu/drm/i915/gt/intel_lrc.c 	return batch;
batch            2106 drivers/gpu/drm/i915/gt/intel_lrc.c static u32 *gen9_init_indirectctx_bb(struct intel_engine_cs *engine, u32 *batch)
batch            2131 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_ARB_ON_OFF | MI_ARB_DISABLE;
batch            2134 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = gen8_emit_flush_coherentl3_wa(engine, batch);
batch            2137 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = gen8_emit_pipe_control(batch,
batch            2144 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = emit_lri(batch, lri, ARRAY_SIZE(lri));
batch            2161 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = GEN9_MEDIA_POOL_STATE;
batch            2162 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = GEN9_MEDIA_POOL_ENABLE;
batch            2163 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = 0x00777000;
batch            2164 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = 0;
batch            2165 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = 0;
batch            2166 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = 0;
batch            2169 drivers/gpu/drm/i915/gt/intel_lrc.c 	*batch++ = MI_ARB_ON_OFF | MI_ARB_ENABLE;
batch            2172 drivers/gpu/drm/i915/gt/intel_lrc.c 	while ((unsigned long)batch % CACHELINE_BYTES)
batch            2173 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = MI_NOOP;
batch            2175 drivers/gpu/drm/i915/gt/intel_lrc.c 	return batch;
batch            2179 drivers/gpu/drm/i915/gt/intel_lrc.c gen10_init_indirectctx_bb(struct intel_engine_cs *engine, u32 *batch)
batch            2189 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = gen8_emit_pipe_control(batch,
batch            2203 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = MI_NOOP;
batch            2206 drivers/gpu/drm/i915/gt/intel_lrc.c 	while ((unsigned long)batch % CACHELINE_BYTES)
batch            2207 drivers/gpu/drm/i915/gt/intel_lrc.c 		*batch++ = MI_NOOP;
batch            2209 drivers/gpu/drm/i915/gt/intel_lrc.c 	return batch;
batch            2247 drivers/gpu/drm/i915/gt/intel_lrc.c typedef u32 *(*wa_bb_func_t)(struct intel_engine_cs *engine, u32 *batch);
batch            2256 drivers/gpu/drm/i915/gt/intel_lrc.c 	void *batch, *batch_ptr;
batch            2291 drivers/gpu/drm/i915/gt/intel_lrc.c 	batch = batch_ptr = kmap_atomic(page);
batch            2299 drivers/gpu/drm/i915/gt/intel_lrc.c 		wa_bb[i]->offset = batch_ptr - batch;
batch            2307 drivers/gpu/drm/i915/gt/intel_lrc.c 		wa_bb[i]->size = batch_ptr - (batch + wa_bb[i]->offset);
batch            2310 drivers/gpu/drm/i915/gt/intel_lrc.c 	BUG_ON(batch_ptr - batch > CTX_WA_BB_OBJ_SIZE);
batch            2312 drivers/gpu/drm/i915/gt/intel_lrc.c 	kunmap_atomic(batch);
batch              70 drivers/gpu/drm/i915/gt/intel_renderstate.c #define OUT_BATCH(batch, i, val)				\
batch              74 drivers/gpu/drm/i915/gt/intel_renderstate.c 		(batch)[(i)++] = (val);				\
batch              93 drivers/gpu/drm/i915/gt/intel_renderstate.c 		u32 s = rodata->batch[i];
batch             100 drivers/gpu/drm/i915/gt/intel_renderstate.c 				    rodata->batch[i + 1] != 0)
batch              33 drivers/gpu/drm/i915/gt/intel_renderstate.h 	const u32 *batch;
batch              40 drivers/gpu/drm/i915/gt/intel_renderstate.h 		.batch = gen ## _g ## _null_state_batch,		\
batch              50 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	u32 *batch;
batch              93 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	h->batch = vaddr;
batch             140 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	u32 *batch;
batch             157 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	h->batch = vaddr;
batch             189 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	batch = h->batch;
batch             191 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch             192 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(hws_address(hws, rq));
batch             193 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = upper_32_bits(hws_address(hws, rq));
batch             194 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = rq->fence.seqno;
batch             195 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             197 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		memset(batch, 0, 1024);
batch             198 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		batch += 1024 / sizeof(*batch);
batch             200 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             201 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_BATCH_BUFFER_START | 1 << 8 | 1;
batch             202 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(vma->node.start);
batch             203 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = upper_32_bits(vma->node.start);
batch             205 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch             206 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = 0;
batch             207 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(hws_address(hws, rq));
batch             208 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = rq->fence.seqno;
batch             209 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             211 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		memset(batch, 0, 1024);
batch             212 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		batch += 1024 / sizeof(*batch);
batch             214 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             215 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_BATCH_BUFFER_START | 1 << 8;
batch             216 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(vma->node.start);
batch             218 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_STORE_DWORD_IMM_GEN4 | MI_USE_GGTT;
batch             219 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = 0;
batch             220 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(hws_address(hws, rq));
batch             221 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = rq->fence.seqno;
batch             222 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             224 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		memset(batch, 0, 1024);
batch             225 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		batch += 1024 / sizeof(*batch);
batch             227 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             228 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_BATCH_BUFFER_START | 2 << 6;
batch             229 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(vma->node.start);
batch             231 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_STORE_DWORD_IMM | MI_MEM_VIRTUAL;
batch             232 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(hws_address(hws, rq));
batch             233 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = rq->fence.seqno;
batch             234 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             236 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		memset(batch, 0, 1024);
batch             237 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		batch += 1024 / sizeof(*batch);
batch             239 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_ARB_CHECK;
batch             240 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = MI_BATCH_BUFFER_START | 2 << 6;
batch             241 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*batch++ = lower_32_bits(vma->node.start);
batch             243 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	*batch++ = MI_BATCH_BUFFER_END; /* not reached */
batch             277 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 	*h->batch = MI_BATCH_BUFFER_END;
batch             334 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*h.batch = MI_BATCH_BUFFER_END;
batch            1516 drivers/gpu/drm/i915/gt/selftest_hangcheck.c 		*h.batch = MI_BATCH_BUFFER_END;
batch            1422 drivers/gpu/drm/i915/gt/selftest_lrc.c 	struct drm_i915_gem_object *batch;
batch            1436 drivers/gpu/drm/i915/gt/selftest_lrc.c 			struct drm_i915_gem_object *batch)
batch            1442 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (batch) {
batch            1443 drivers/gpu/drm/i915/gt/selftest_lrc.c 		vma = i915_vma_instance(batch, ctx->vm, NULL);
batch            1495 drivers/gpu/drm/i915/gt/selftest_lrc.c 				   smoke->batch);
batch            1523 drivers/gpu/drm/i915/gt/selftest_lrc.c 			arg[id].batch = NULL;
batch            1573 drivers/gpu/drm/i915/gt/selftest_lrc.c 					   flags & BATCH ? smoke->batch : NULL);
batch            1613 drivers/gpu/drm/i915/gt/selftest_lrc.c 	smoke.batch = i915_gem_object_create_internal(smoke.i915, PAGE_SIZE);
batch            1614 drivers/gpu/drm/i915/gt/selftest_lrc.c 	if (IS_ERR(smoke.batch)) {
batch            1615 drivers/gpu/drm/i915/gt/selftest_lrc.c 		err = PTR_ERR(smoke.batch);
batch            1619 drivers/gpu/drm/i915/gt/selftest_lrc.c 	cs = i915_gem_object_pin_map(smoke.batch, I915_MAP_WB);
batch            1627 drivers/gpu/drm/i915/gt/selftest_lrc.c 	i915_gem_object_flush_map(smoke.batch);
batch            1628 drivers/gpu/drm/i915/gt/selftest_lrc.c 	i915_gem_object_unpin_map(smoke.batch);
batch            1662 drivers/gpu/drm/i915/gt/selftest_lrc.c 	i915_gem_object_put(smoke.batch);
batch             467 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_vma *batch;
batch             475 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	batch = create_batch(ctx);
batch             476 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(batch)) {
batch             477 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		err = PTR_ERR(batch);
batch             503 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC);
batch             552 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		i915_gem_object_flush_map(batch->obj);
batch             553 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		i915_gem_object_unpin_map(batch->obj);
batch             569 drivers/gpu/drm/i915/gt/selftest_workarounds.c 					    batch->node.start, PAGE_SIZE,
batch             674 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_vma_unpin_and_release(&batch, 0);
batch             812 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	struct i915_vma *batch;
batch             816 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	batch = create_batch(ctx);
batch             817 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	if (IS_ERR(batch))
batch             818 drivers/gpu/drm/i915/gt/selftest_workarounds.c 		return PTR_ERR(batch);
batch             820 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	cs = i915_gem_object_pin_map(batch->obj, I915_MAP_WC);
batch             838 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_gem_object_flush_map(batch->obj);
batch             854 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	err = engine->emit_bb_start(rq, batch->node.start, 0, 0);
batch             862 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_gem_object_unpin_map(batch->obj);
batch             864 drivers/gpu/drm/i915/gt/selftest_workarounds.c 	i915_vma_unpin_and_release(&batch, 0);
batch            2819 drivers/gpu/drm/i915/i915_debugfs.c 	seq_printf(m, "batch = %lu\n", i915->mm.shrinker.batch);
batch            1396 drivers/gpu/drm/i915/i915_gpu_error.c 				      request->batch,
batch             712 drivers/gpu/drm/i915/i915_request.c 	rq->batch = NULL;
batch             208 drivers/gpu/drm/i915/i915_request.h 	struct i915_vma *batch;
batch             659 drivers/gpu/drm/i915/selftests/i915_request.c 	      struct i915_vma *batch)
batch             669 drivers/gpu/drm/i915/selftests/i915_request.c 				    batch->node.start,
batch             670 drivers/gpu/drm/i915/selftests/i915_request.c 				    batch->node.size,
batch             686 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_vma *batch;
batch             698 drivers/gpu/drm/i915/selftests/i915_request.c 	batch = empty_batch(i915);
batch             699 drivers/gpu/drm/i915/selftests/i915_request.c 	if (IS_ERR(batch)) {
batch             700 drivers/gpu/drm/i915/selftests/i915_request.c 		err = PTR_ERR(batch);
batch             715 drivers/gpu/drm/i915/selftests/i915_request.c 		request = empty_request(engine, batch);
batch             726 drivers/gpu/drm/i915/selftests/i915_request.c 				request = empty_request(engine, batch);
batch             753 drivers/gpu/drm/i915/selftests/i915_request.c 	i915_vma_unpin(batch);
batch             754 drivers/gpu/drm/i915/selftests/i915_request.c 	i915_vma_put(batch);
batch             816 drivers/gpu/drm/i915/selftests/i915_request.c static int recursive_batch_resolve(struct i915_vma *batch)
batch             820 drivers/gpu/drm/i915/selftests/i915_request.c 	cmd = i915_gem_object_pin_map(batch->obj, I915_MAP_WC);
batch             825 drivers/gpu/drm/i915/selftests/i915_request.c 	intel_gt_chipset_flush(batch->vm->gt);
batch             827 drivers/gpu/drm/i915/selftests/i915_request.c 	i915_gem_object_unpin_map(batch->obj);
batch             839 drivers/gpu/drm/i915/selftests/i915_request.c 	struct i915_vma *batch;
batch             855 drivers/gpu/drm/i915/selftests/i915_request.c 	batch = recursive_batch(i915);
batch             856 drivers/gpu/drm/i915/selftests/i915_request.c 	if (IS_ERR(batch)) {
batch             857 drivers/gpu/drm/i915/selftests/i915_request.c 		err = PTR_ERR(batch);
batch             872 drivers/gpu/drm/i915/selftests/i915_request.c 					    batch->node.start,
batch             873 drivers/gpu/drm/i915/selftests/i915_request.c 					    batch->node.size,
batch             876 drivers/gpu/drm/i915/selftests/i915_request.c 		request[id]->batch = batch;
batch             878 drivers/gpu/drm/i915/selftests/i915_request.c 		i915_vma_lock(batch);
batch             879 drivers/gpu/drm/i915/selftests/i915_request.c 		err = i915_request_await_object(request[id], batch->obj, 0);
batch             881 drivers/gpu/drm/i915/selftests/i915_request.c 			err = i915_vma_move_to_active(batch, request[id], 0);
batch             882 drivers/gpu/drm/i915/selftests/i915_request.c 		i915_vma_unlock(batch);
batch             898 drivers/gpu/drm/i915/selftests/i915_request.c 	err = recursive_batch_resolve(batch);
batch             927 drivers/gpu/drm/i915/selftests/i915_request.c 	i915_vma_unpin(batch);
batch             928 drivers/gpu/drm/i915/selftests/i915_request.c 	i915_vma_put(batch);
batch             960 drivers/gpu/drm/i915/selftests/i915_request.c 		struct i915_vma *batch;
batch             962 drivers/gpu/drm/i915/selftests/i915_request.c 		batch = recursive_batch(i915);
batch             963 drivers/gpu/drm/i915/selftests/i915_request.c 		if (IS_ERR(batch)) {
batch             964 drivers/gpu/drm/i915/selftests/i915_request.c 			err = PTR_ERR(batch);
batch             990 drivers/gpu/drm/i915/selftests/i915_request.c 					    batch->node.start,
batch             991 drivers/gpu/drm/i915/selftests/i915_request.c 					    batch->node.size,
batch             994 drivers/gpu/drm/i915/selftests/i915_request.c 		request[id]->batch = batch;
batch             996 drivers/gpu/drm/i915/selftests/i915_request.c 		i915_vma_lock(batch);
batch             997 drivers/gpu/drm/i915/selftests/i915_request.c 		err = i915_request_await_object(request[id], batch->obj, false);
batch             999 drivers/gpu/drm/i915/selftests/i915_request.c 			err = i915_vma_move_to_active(batch, request[id], 0);
batch            1000 drivers/gpu/drm/i915/selftests/i915_request.c 		i915_vma_unlock(batch);
batch            1019 drivers/gpu/drm/i915/selftests/i915_request.c 		err = recursive_batch_resolve(request[id]->batch);
batch            1047 drivers/gpu/drm/i915/selftests/i915_request.c 		cmd = i915_gem_object_pin_map(request[id]->batch->obj,
batch            1053 drivers/gpu/drm/i915/selftests/i915_request.c 			i915_gem_object_unpin_map(request[id]->batch->obj);
batch            1056 drivers/gpu/drm/i915/selftests/i915_request.c 		i915_vma_put(request[id]->batch);
batch              49 drivers/gpu/drm/i915/selftests/igt_spinner.c 	spin->batch = vaddr;
batch              98 drivers/gpu/drm/i915/selftests/igt_spinner.c 	u32 *batch;
batch             133 drivers/gpu/drm/i915/selftests/igt_spinner.c 	batch = spin->batch;
batch             135 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = MI_STORE_DWORD_IMM_GEN4;
batch             136 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = lower_32_bits(hws_address(hws, rq));
batch             137 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = upper_32_bits(hws_address(hws, rq));
batch             138 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = rq->fence.seqno;
batch             140 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = arbitration_command;
batch             142 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = MI_BATCH_BUFFER_START | 1 << 8 | 1;
batch             143 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = lower_32_bits(vma->node.start);
batch             144 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = upper_32_bits(vma->node.start);
batch             145 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*batch++ = MI_BATCH_BUFFER_END; /* not reached */
batch             180 drivers/gpu/drm/i915/selftests/igt_spinner.c 	*spin->batch = MI_BATCH_BUFFER_END;
batch              23 drivers/gpu/drm/i915/selftests/igt_spinner.h 	u32 *batch;
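
Throughout the i915 hits, "batch" is a batch buffer: a VMA filled with MI_* commands, tied to a request, and executed through the engine's emit_bb_start() hook. A hedged sketch of that recurring submission pattern (error handling and the i915_vma_lock/unlock bracketing are elided; names match the call sites above):

	err = i915_request_await_object(rq, batch->obj, false);
	if (err == 0)
		err = i915_vma_move_to_active(batch, rq, 0);
	if (err == 0)
		err = rq->engine->emit_bb_start(rq,
						batch->node.start,
						batch->node.size,
						0);
	i915_request_add(rq);   /* the MI_BATCH_BUFFER_END-terminated batch
				 * now runs asynchronously on the engine */
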
batch             236 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 				  struct vmw_otable_batch *batch)
batch             240 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct vmw_otable *otables = batch->otables;
batch             249 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	for (i = 0; i < batch->num_otables; ++i) {
batch             261 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 			    0, false, &batch->otable_bo);
batch             266 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = ttm_bo_reserve(batch->otable_bo, false, true, NULL);
batch             268 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_driver.ttm_tt_populate(batch->otable_bo->ttm, &ctx);
batch             271 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ret = vmw_bo_map_dma(batch->otable_bo);
batch             275 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_unreserve(batch->otable_bo);
batch             278 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	for (i = 0; i < batch->num_otables; ++i) {
batch             279 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (!batch->otables[i].enabled)
batch             282 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		ret = vmw_setup_otable_base(dev_priv, i, batch->otable_bo,
batch             293 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_unreserve(batch->otable_bo);
batch             295 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	for (i = 0; i < batch->num_otables; ++i) {
batch             296 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (batch->otables[i].enabled)
batch             298 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 						 &batch->otables[i]);
batch             301 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_put(batch->otable_bo);
batch             302 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	batch->otable_bo = NULL;
batch             350 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 			       struct vmw_otable_batch *batch)
batch             353 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	struct ttm_buffer_object *bo = batch->otable_bo;
batch             356 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	for (i = 0; i < batch->num_otables; ++i)
batch             357 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 		if (batch->otables[i].enabled)
batch             359 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 						 &batch->otables[i]);
batch             367 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	ttm_bo_put(batch->otable_bo);
batch             368 drivers/gpu/drm/vmwgfx/vmwgfx_mob.c 	batch->otable_bo = NULL;
batch             241 drivers/iio/imu/st_lsm6dsx/st_lsm6dsx.h 	struct st_lsm6dsx_reg batch[ST_LSM6DSX_MAX_ID];
batch             205 drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_buffer.c 	batch_reg = &hw->settings->batch[sensor->id];
batch             589 drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_core.c 		.batch = {
batch             721 drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_core.c 		.batch = {
batch             830 drivers/iio/imu/st_lsm6dsx/st_lsm6dsx_core.c 		.batch = {
batch              69 drivers/infiniband/core/cq.c 			   int batch)
batch              78 drivers/infiniband/core/cq.c 	while ((n = ib_poll_cq(cq, min_t(u32, batch,
batch              91 drivers/infiniband/core/cq.c 		if (n != batch || (budget != -1 && completed >= budget))
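
Here batch bounds how many completions a single ib_poll_cq() call may return; __ib_process_cq() keeps polling full batches and stops once a batch comes back short or the budget is spent. A simplified sketch of that loop shape (the array size is illustrative; only ib_poll_cq() and struct ib_wc are taken as given):

	struct ib_wc wcs[16];
	int i, n;

	while ((n = ib_poll_cq(cq, ARRAY_SIZE(wcs), wcs)) > 0) {
		for (i = 0; i < n; i++)
			if (wcs[i].wr_cqe)
				wcs[i].wr_cqe->done(cq, &wcs[i]);
		if (n < ARRAY_SIZE(wcs))
			break;      /* short batch: CQ drained for now */
	}
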
batch             563 drivers/interconnect/qcom/sdm845.c 	size_t idx = 0, batch = 0, cur_vcd_size = 0;
batch             578 drivers/interconnect/qcom/sdm845.c 		n[batch]++;
batch             585 drivers/interconnect/qcom/sdm845.c 		if (n[batch] >= MAX_RPMH_PAYLOAD) {
batch             587 drivers/interconnect/qcom/sdm845.c 				n[batch] -= cur_vcd_size;
batch             588 drivers/interconnect/qcom/sdm845.c 				n[batch + 1] = cur_vcd_size;
batch             590 drivers/interconnect/qcom/sdm845.c 			batch++;
batch            1210 drivers/mailbox/bcm-flexrm-mailbox.c 		for (i = msg->batch.msgs_queued;
batch            1211 drivers/mailbox/bcm-flexrm-mailbox.c 		     i < msg->batch.msgs_count; i++) {
batch            1213 drivers/mailbox/bcm-flexrm-mailbox.c 						 &msg->batch.msgs[i]);
batch            1218 drivers/mailbox/bcm-flexrm-mailbox.c 			msg->batch.msgs_queued++;
batch             857 drivers/md/bcache/btree.c 	c->shrink.batch = c->btree_pages * 2;
batch            1693 drivers/md/dm-bufio.c 	c->shrinker.batch = 0;
batch             980 drivers/md/dm-clone-target.c static void __batch_hydration(struct batch_info *batch,
batch             986 drivers/md/dm-clone-target.c 	if (batch->head) {
batch             988 drivers/md/dm-clone-target.c 		if (batch->nr_batched_regions < max_batch_size &&
batch             989 drivers/md/dm-clone-target.c 		    (batch->head->region_nr + batch->nr_batched_regions) == hd->region_nr) {
batch             990 drivers/md/dm-clone-target.c 			list_add_tail(&hd->list, &batch->head->list);
batch             991 drivers/md/dm-clone-target.c 			batch->nr_batched_regions++;
batch             996 drivers/md/dm-clone-target.c 		if (batch->nr_batched_regions >= max_batch_size || hd) {
batch             997 drivers/md/dm-clone-target.c 			hydration_copy(batch->head, batch->nr_batched_regions);
batch             998 drivers/md/dm-clone-target.c 			batch->head = NULL;
batch             999 drivers/md/dm-clone-target.c 			batch->nr_batched_regions = 0;
batch            1014 drivers/md/dm-clone-target.c 	batch->head = hd;
batch            1015 drivers/md/dm-clone-target.c 	batch->nr_batched_regions = 1;
batch            1020 drivers/md/dm-clone-target.c 					    struct batch_info *batch)
batch            1045 drivers/md/dm-clone-target.c 			__batch_hydration(batch, hd);
batch            1069 drivers/md/dm-clone-target.c 	struct batch_info batch = {
batch            1101 drivers/md/dm-clone-target.c 		current_volume += batch.nr_batched_regions;
batch            1106 drivers/md/dm-clone-target.c 		offset = __start_next_hydration(clone, offset, &batch);
batch            1109 drivers/md/dm-clone-target.c 	if (batch.head)
batch            1110 drivers/md/dm-clone-target.c 		hydration_copy(batch.head, batch.nr_batched_regions);
batch            6184 drivers/md/raid5.c 	struct stripe_head *batch[MAX_STRIPE_BATCH], *sh;
batch            6190 drivers/md/raid5.c 		batch[batch_size++] = sh;
batch            6216 drivers/md/raid5.c 		handle_stripe(batch[i]);
batch            6223 drivers/md/raid5.c 		hash = batch[i]->hash_lock_index;
batch            6224 drivers/md/raid5.c 		__release_stripe(conf, batch[i], &temp_inactive_list[hash]);
batch            7097 drivers/md/raid5.c 	conf->shrinker.batch = 128;
batch             204 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 			unsigned int member_cnt, enum nfp_fl_lag_batch *batch)
batch             224 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	if (*batch == NFP_FL_LAG_BATCH_FIRST) {
batch             227 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 		*batch = NFP_FL_LAG_BATCH_MEMBER;
batch             233 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 		*batch = NFP_FL_LAG_BATCH_FINISHED;
batch             239 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	if (*batch == NFP_FL_LAG_BATCH_FINISHED) {
batch             266 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	enum nfp_fl_lag_batch batch = NFP_FL_LAG_BATCH_FIRST;
batch             288 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 						      &batch);
batch             357 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 					      active_count, &batch);
batch             371 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	if (batch == NFP_FL_LAG_BATCH_MEMBER) {
batch             372 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 		batch = NFP_FL_LAG_BATCH_FINISHED;
batch             373 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 		err = nfp_fl_lag_config_group(lag, NULL, NULL, 0, &batch);
batch             657 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	enum nfp_fl_lag_batch batch = NFP_FL_LAG_BATCH_FIRST;
batch             660 drivers/net/ethernet/netronome/nfp/flower/lag_conf.c 	return nfp_fl_lag_config_group(lag, NULL, NULL, 0, &batch);
batch             312 drivers/staging/android/ion/ion_heap.c 	heap->shrinker.batch = 0;
batch            1018 drivers/target/iscsi/iscsi_target_erl1.c 	int batch = 0;
batch            1026 drivers/target/iscsi/iscsi_target_erl1.c 		batch = 1;
batch            1031 drivers/target/iscsi/iscsi_target_erl1.c 			batch = 1;
batch            1039 drivers/target/iscsi/iscsi_target_erl1.c 	ooo_cmdsn->batch_count		= (batch) ?
batch             836 drivers/xen/gntdev.c static int gntdev_get_page(struct gntdev_copy_batch *batch, void __user *virt,
batch             848 drivers/xen/gntdev.c 	batch->pages[batch->nr_pages++] = page;
batch             856 drivers/xen/gntdev.c static void gntdev_put_pages(struct gntdev_copy_batch *batch)
batch             860 drivers/xen/gntdev.c 	for (i = 0; i < batch->nr_pages; i++)
batch             861 drivers/xen/gntdev.c 		put_page(batch->pages[i]);
batch             862 drivers/xen/gntdev.c 	batch->nr_pages = 0;
batch             865 drivers/xen/gntdev.c static int gntdev_copy(struct gntdev_copy_batch *batch)
batch             869 drivers/xen/gntdev.c 	gnttab_batch_copy(batch->ops, batch->nr_ops);
batch             870 drivers/xen/gntdev.c 	gntdev_put_pages(batch);
batch             876 drivers/xen/gntdev.c 	for (i = 0; i < batch->nr_ops; i++) {
batch             877 drivers/xen/gntdev.c 		s16 status = batch->ops[i].status;
batch             883 drivers/xen/gntdev.c 		if (__get_user(old_status, batch->status[i]))
batch             889 drivers/xen/gntdev.c 		if (__put_user(status, batch->status[i]))
batch             893 drivers/xen/gntdev.c 	batch->nr_ops = 0;
batch             897 drivers/xen/gntdev.c static int gntdev_grant_copy_seg(struct gntdev_copy_batch *batch,
batch             931 drivers/xen/gntdev.c 		if (batch->nr_ops >= GNTDEV_COPY_BATCH) {
batch             932 drivers/xen/gntdev.c 			ret = gntdev_copy(batch);
batch             939 drivers/xen/gntdev.c 		op = &batch->ops[batch->nr_ops];
batch             952 drivers/xen/gntdev.c 			ret = gntdev_get_page(batch, virt, false, &gfn);
batch             971 drivers/xen/gntdev.c 			ret = gntdev_get_page(batch, virt, true, &gfn);
batch             983 drivers/xen/gntdev.c 		batch->status[batch->nr_ops] = status;
batch             984 drivers/xen/gntdev.c 		batch->nr_ops++;
batch             993 drivers/xen/gntdev.c 	struct gntdev_copy_batch batch;
batch            1000 drivers/xen/gntdev.c 	batch.nr_ops = 0;
batch            1001 drivers/xen/gntdev.c 	batch.nr_pages = 0;
batch            1011 drivers/xen/gntdev.c 		ret = gntdev_grant_copy_seg(&batch, &seg, &copy.segments[i].status);
batch            1017 drivers/xen/gntdev.c 	if (batch.nr_ops)
batch            1018 drivers/xen/gntdev.c 		ret = gntdev_copy(&batch);
batch            1022 drivers/xen/gntdev.c 	gntdev_put_pages(&batch);
batch             958 drivers/xen/grant-table.c void gnttab_batch_map(struct gnttab_map_grant_ref *batch, unsigned count)
batch             962 drivers/xen/grant-table.c 	if (HYPERVISOR_grant_table_op(GNTTABOP_map_grant_ref, batch, count))
batch             964 drivers/xen/grant-table.c 	for (op = batch; op < batch + count; op++)
batch             971 drivers/xen/grant-table.c void gnttab_batch_copy(struct gnttab_copy *batch, unsigned count)
batch             975 drivers/xen/grant-table.c 	if (HYPERVISOR_grant_table_op(GNTTABOP_copy, batch, count))
batch             977 drivers/xen/grant-table.c 	for (op = batch; op < batch + count; op++)
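
gnttab_batch_map() and gnttab_batch_copy() issue a single grant-table hypercall for a whole array of operations and internally retry entries that come back GNTST_eagain; per-entry results stay in each element's status field. A hedged usage sketch (filling of the source/dest descriptors is elided, the count is illustrative):

	struct gnttab_copy ops[8];
	unsigned int i, n = 8;          /* illustrative count */

	/* ... fill ops[i].source, ops[i].dest, ops[i].len, ops[i].flags ... */
	gnttab_batch_copy(ops, n);      /* one GNTTABOP_copy hypercall for all n */
	for (i = 0; i < n; i++)
		if (ops[i].status != GNTST_okay)
			pr_err("grant copy %u failed: %d\n", i, ops[i].status);
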
batch            3072 fs/btrfs/tree-log.c 		int batch = atomic_read(&root->log_batch);
batch            3081 fs/btrfs/tree-log.c 		if (batch == atomic_read(&root->log_batch))
batch             195 fs/erofs/utils.c 	void *batch[PAGEVEC_SIZE];
batch             203 fs/erofs/utils.c 				       batch, first_index, PAGEVEC_SIZE);
batch             206 fs/erofs/utils.c 		struct erofs_workgroup *grp = xa_untag_pointer(batch[i]);
batch             268 fs/super.c     	s->s_shrink.batch = 1024;
batch             804 fs/xfs/xfs_icache.c 		struct xfs_inode *batch[XFS_LOOKUP_BATCH];
batch             812 fs/xfs/xfs_icache.c 					(void **)batch, first_index,
batch             817 fs/xfs/xfs_icache.c 					(void **) batch, first_index,
batch             830 fs/xfs/xfs_icache.c 			struct xfs_inode *ip = batch[i];
batch             833 fs/xfs/xfs_icache.c 				batch[i] = NULL;
batch             858 fs/xfs/xfs_icache.c 			if (!batch[i])
batch             861 fs/xfs/xfs_icache.c 			    xfs_iflags_test(batch[i], XFS_INEW))
batch             862 fs/xfs/xfs_icache.c 				xfs_inew_wait(batch[i]);
batch             863 fs/xfs/xfs_icache.c 			error = execute(batch[i], flags, args);
batch             864 fs/xfs/xfs_icache.c 			xfs_irele(batch[i]);
batch            1281 fs/xfs/xfs_icache.c 			struct xfs_inode *batch[XFS_LOOKUP_BATCH];
batch            1287 fs/xfs/xfs_icache.c 					(void **)batch, first_index,
batch            1301 fs/xfs/xfs_icache.c 				struct xfs_inode *ip = batch[i];
batch            1304 fs/xfs/xfs_icache.c 					batch[i] = NULL;
batch            1332 fs/xfs/xfs_icache.c 				if (!batch[i])
batch            1334 fs/xfs/xfs_icache.c 				error = xfs_reclaim_inode(batch[i], pag, flags);
batch            1215 fs/xfs/xfs_mount.c 	s32			batch;
batch            1251 fs/xfs/xfs_mount.c 		batch = 1;
batch            1253 fs/xfs/xfs_mount.c 		batch = XFS_FDBLOCKS_BATCH;
batch            1255 fs/xfs/xfs_mount.c 	percpu_counter_add_batch(&mp->m_fdblocks, delta, batch);
batch              66 fs/xfs/xfs_qm.c 		struct xfs_dquot *batch[XFS_DQ_LOOKUP_BATCH];
batch              71 fs/xfs/xfs_qm.c 		nr_found = radix_tree_gang_lookup(tree, (void **)batch,
batch              79 fs/xfs/xfs_qm.c 			struct xfs_dquot *dqp = batch[i];
batch              83 fs/xfs/xfs_qm.c 			error = execute(batch[i], data);
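
The fs/xfs entries above all follow the same gang-lookup loop: fetch up to XFS_LOOKUP_BATCH (or XFS_DQ_LOOKUP_BATCH) pointers per radix-tree pass, process them, then restart the lookup just past the last index found. A rough standalone illustration of that loop shape, with gang_lookup() as a hypothetical stand-in for radix_tree_gang_lookup():

#include <stdio.h>

#define LOOKUP_BATCH 4
#define NR_ITEMS     10

static int items[NR_ITEMS] = { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10 };

/* Return up to max items with index >= first, like a gang lookup. */
static int gang_lookup(int *batch, unsigned int first, int max)
{
	int n = 0;

	for (unsigned int i = first; i < NR_ITEMS && n < max; i++)
		batch[n++] = items[i];
	return n;
}

int main(void)
{
	unsigned int first_index = 0;
	int batch[LOOKUP_BATCH];

	for (;;) {
		int nr_found = gang_lookup(batch, first_index, LOOKUP_BATCH);

		if (!nr_found)
			break;
		for (int i = 0; i < nr_found; i++)
			printf("processing %d\n", batch[i]);
		first_index += nr_found;   /* the real code advances past the last found index */
	}
	return 0;
}
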
batch             239 include/asm-generic/tlb.h 	struct mmu_table_batch	*batch;
batch             458 include/linux/blk_types.h 	u64 batch;
batch              45 include/linux/mailbox/brcm-message.h 		} batch;
batch             335 include/linux/mmzone.h 	int batch;		/* chunk size for buddy add/remove */
batch              44 include/linux/percpu_counter.h 			      s32 batch);
batch              46 include/linux/percpu_counter.h int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch);
batch             127 include/linux/percpu_counter.h __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch)
batch             141 include/linux/percpu_counter.h percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch)
batch              38 include/linux/ptr_ring.h 	int batch; /* number of entries to consume in a batch */
batch             271 include/linux/ptr_ring.h 	if (unlikely(consumer_head - r->consumer_tail >= r->batch ||
batch             476 include/linux/ptr_ring.h 	r->batch = SMP_CACHE_BYTES * 2 / sizeof(*(r->queue));
batch             482 include/linux/ptr_ring.h 	if (r->batch > r->size / 2 || !r->batch)
batch             483 include/linux/ptr_ring.h 		r->batch = 1;
batch             510 include/linux/ptr_ring.h static inline void ptr_ring_unconsume(struct ptr_ring *r, void **batch, int n,
batch             543 include/linux/ptr_ring.h 		r->queue[head] = batch[--n];
batch             552 include/linux/ptr_ring.h 		destroy(batch[--n]);
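
The ptr_ring lines above size r->batch so that consumed slots are invalidated roughly two cache lines at a time, falling back to 1 when that would cover more than half of a small ring. A small sketch of that heuristic, assuming a 64-byte cache line:

#include <stdio.h>

#define SMP_CACHE_BYTES 64   /* assumption for illustration */

static int ring_batch(int size, size_t slot_size)
{
	int batch = SMP_CACHE_BYTES * 2 / slot_size;

	/* Too large relative to the ring (or zero) defeats the purpose:
	 * producer and consumer would keep bouncing the same slots. */
	if (batch > size / 2 || !batch)
		batch = 1;
	return batch;
}

int main(void)
{
	printf("size 1024: batch %d\n", ring_batch(1024, sizeof(void *)));
	printf("size 8:    batch %d\n", ring_batch(8, sizeof(void *)));
	return 0;
}
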
batch              66 include/linux/shrinker.h 	long batch;	/* reclaim batch size, 0 = default */
batch             241 include/xen/grant_table.h void gnttab_batch_map(struct gnttab_map_grant_ref *batch, unsigned count);
batch             242 include/xen/grant_table.h void gnttab_batch_copy(struct gnttab_copy *batch, unsigned count);
batch              82 lib/percpu_counter.c void percpu_counter_add_batch(struct percpu_counter *fbc, s64 amount, s32 batch)
batch              88 lib/percpu_counter.c 	if (count >= batch || count <= -batch) {
batch             202 lib/percpu_counter.c int __percpu_counter_compare(struct percpu_counter *fbc, s64 rhs, s32 batch)
batch             208 lib/percpu_counter.c 	if (abs(count - rhs) > (batch * num_online_cpus())) {
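
percpu_counter_add_batch(), shown above, only folds a CPU's local delta into the shared count once it reaches +/-batch, which is why __percpu_counter_compare() allows an error margin of batch * num_online_cpus(). A single-threaded analog of that fold-on-threshold behaviour (illustrative names only):

#include <stdio.h>

#define NR_CPUS 4

struct pc {
	long long total;      /* shared count, approximate between folds */
	long local[NR_CPUS];  /* per-CPU deltas not yet folded in */
};

static void pc_add_batch(struct pc *c, int cpu, long amount, long batch)
{
	long count = c->local[cpu] + amount;

	if (count >= batch || count <= -batch) {
		c->total += count;       /* fold into the shared count and reset */
		c->local[cpu] = 0;
	} else {
		c->local[cpu] = count;   /* stay local: no shared-cacheline write */
	}
}

int main(void)
{
	struct pc c = { 0 };

	for (int i = 0; i < 100; i++)
		pc_add_batch(&c, i % NR_CPUS, 1, 8);

	/* 100 increments were made, but up to batch-1 per CPU may still be local */
	printf("shared total: %lld\n", c.total);
	return 0;
}
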
batch            2544 mm/memcontrol.c 	unsigned int batch = max(MEMCG_CHARGE_BATCH, nr_pages);
batch            2560 mm/memcontrol.c 	    page_counter_try_charge(&memcg->memsw, batch, &counter)) {
batch            2561 mm/memcontrol.c 		if (page_counter_try_charge(&memcg->memory, batch, &counter))
batch            2564 mm/memcontrol.c 			page_counter_uncharge(&memcg->memsw, batch);
batch            2571 mm/memcontrol.c 	if (batch > nr_pages) {
batch            2572 mm/memcontrol.c 		batch = nr_pages;
batch            2688 mm/memcontrol.c 	css_get_many(&memcg->css, batch);
batch            2689 mm/memcontrol.c 	if (batch > nr_pages)
batch            2690 mm/memcontrol.c 		refill_stock(memcg, batch - nr_pages);
batch            2708 mm/memcontrol.c 			current->memcg_nr_pages_over_high += batch;
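
The mm/memcontrol.c lines above overcharge by a whole batch (at least MEMCG_CHARGE_BATCH pages) when possible and park the surplus in a per-cpu stock via refill_stock(), so later small charges avoid the shared page counters. A rough, single-threaded userspace illustration of that overcharge-and-stock idea (hypothetical names, simplified failure handling):

#include <stdbool.h>
#include <stdio.h>

#define CHARGE_BATCH 32UL   /* assumed stand-in for MEMCG_CHARGE_BATCH */

static unsigned long shared_limit = 1000;
static unsigned long shared_used;
static unsigned long stock;          /* locally cached, already-charged pages */

static bool try_charge(unsigned long nr_pages)
{
	unsigned long batch = nr_pages > CHARGE_BATCH ? nr_pages : CHARGE_BATCH;

	if (stock >= nr_pages) {         /* fast path: consume the local stock */
		stock -= nr_pages;
		return true;
	}
	if (shared_used + batch > shared_limit) {
		batch = nr_pages;        /* fall back to charging only what is needed */
		if (shared_used + batch > shared_limit)
			return false;
	}
	shared_used += batch;
	stock += batch - nr_pages;       /* keep the surplus for later charges */
	return true;
}

int main(void)
{
	for (int i = 0; i < 5; i++)
		printf("charge 4: %s, stock now %lu\n",
		       try_charge(4) ? "ok" : "fail", stock);
	return 0;
}
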
batch             147 mm/mm_init.c   	s32 batch = max_t(s32, nr*2, 32);
batch             152 mm/mm_init.c   	vm_committed_as_batch = max_t(s32, memsized_batch, batch);
batch              18 mm/mmu_gather.c 	struct mmu_gather_batch *batch;
batch              20 mm/mmu_gather.c 	batch = tlb->active;
batch              21 mm/mmu_gather.c 	if (batch->next) {
batch              22 mm/mmu_gather.c 		tlb->active = batch->next;
batch              29 mm/mmu_gather.c 	batch = (void *)__get_free_pages(GFP_NOWAIT | __GFP_NOWARN, 0);
batch              30 mm/mmu_gather.c 	if (!batch)
batch              34 mm/mmu_gather.c 	batch->next = NULL;
batch              35 mm/mmu_gather.c 	batch->nr   = 0;
batch              36 mm/mmu_gather.c 	batch->max  = MAX_GATHER_BATCH;
batch              38 mm/mmu_gather.c 	tlb->active->next = batch;
batch              39 mm/mmu_gather.c 	tlb->active = batch;
batch              46 mm/mmu_gather.c 	struct mmu_gather_batch *batch;
batch              48 mm/mmu_gather.c 	for (batch = &tlb->local; batch && batch->nr; batch = batch->next) {
batch              49 mm/mmu_gather.c 		free_pages_and_swap_cache(batch->pages, batch->nr);
batch              50 mm/mmu_gather.c 		batch->nr = 0;
batch              57 mm/mmu_gather.c 	struct mmu_gather_batch *batch, *next;
batch              59 mm/mmu_gather.c 	for (batch = tlb->local.next; batch; batch = next) {
batch              60 mm/mmu_gather.c 		next = batch->next;
batch              61 mm/mmu_gather.c 		free_pages((unsigned long)batch, 0);
batch              68 mm/mmu_gather.c 	struct mmu_gather_batch *batch;
batch              76 mm/mmu_gather.c 	batch = tlb->active;
batch              81 mm/mmu_gather.c 	batch->pages[batch->nr++] = page;
batch              82 mm/mmu_gather.c 	if (batch->nr == batch->max) {
batch              85 mm/mmu_gather.c 		batch = tlb->active;
batch              87 mm/mmu_gather.c 	VM_BUG_ON_PAGE(batch->nr > batch->max, page);
batch             135 mm/mmu_gather.c 	struct mmu_table_batch *batch;
batch             138 mm/mmu_gather.c 	batch = container_of(head, struct mmu_table_batch, rcu);
batch             140 mm/mmu_gather.c 	for (i = 0; i < batch->nr; i++)
batch             141 mm/mmu_gather.c 		__tlb_remove_table(batch->tables[i]);
batch             143 mm/mmu_gather.c 	free_page((unsigned long)batch);
batch             148 mm/mmu_gather.c 	struct mmu_table_batch **batch = &tlb->batch;
batch             150 mm/mmu_gather.c 	if (*batch) {
batch             152 mm/mmu_gather.c 		call_rcu(&(*batch)->rcu, tlb_remove_table_rcu);
batch             153 mm/mmu_gather.c 		*batch = NULL;
batch             159 mm/mmu_gather.c 	struct mmu_table_batch **batch = &tlb->batch;
batch             161 mm/mmu_gather.c 	if (*batch == NULL) {
batch             162 mm/mmu_gather.c 		*batch = (struct mmu_table_batch *)__get_free_page(GFP_NOWAIT | __GFP_NOWARN);
batch             163 mm/mmu_gather.c 		if (*batch == NULL) {
batch             168 mm/mmu_gather.c 		(*batch)->nr = 0;
batch             171 mm/mmu_gather.c 	(*batch)->tables[(*batch)->nr++] = table;
batch             172 mm/mmu_gather.c 	if ((*batch)->nr == MAX_TABLE_BATCH)
batch             224 mm/mmu_gather.c 	tlb->batch = NULL;
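
mm/mmu_gather.c above collects the pages to be freed into fixed-size mmu_gather_batch structures, chains a freshly allocated batch once the active one is full, and walks the whole chain at flush time. A simplified userspace version of that chained-batch pattern (illustrative names; the kernel sizes each batch to a page and flushes immediately when allocation fails):

#include <stdlib.h>

#define GATHER_BATCH 8   /* small value for illustration */

struct gather_batch {
	struct gather_batch *next;
	unsigned int nr;
	void *items[GATHER_BATCH];
};

struct gather {
	struct gather_batch local;   /* first batch is embedded, like tlb->local */
	struct gather_batch *active;
};

static void gather_init(struct gather *g)
{
	g->local.next = NULL;
	g->local.nr = 0;
	g->active = &g->local;
}

static int gather_add(struct gather *g, void *item)
{
	struct gather_batch *b = g->active;

	if (b->nr == GATHER_BATCH) {           /* current batch full: chain a new one */
		struct gather_batch *nb = calloc(1, sizeof(*nb));

		if (!nb)
			return -1;             /* out of memory: a real caller flushes now */
		b->next = nb;
		g->active = b = nb;
	}
	b->items[b->nr++] = item;
	return 0;
}

static void gather_flush(struct gather *g)
{
	struct gather_batch *b, *next;

	for (b = &g->local; b; b = b->next)
		for (unsigned int i = 0; i < b->nr; i++)
			free(b->items[i]);     /* stands in for freeing the gathered pages */
	for (b = g->local.next; b; b = next) {
		next = b->next;
		free(b);                       /* release the chained batch structures */
	}
	gather_init(g);
}

int main(void)
{
	struct gather g;

	gather_init(&g);
	for (int i = 0; i < 20; i++)
		gather_add(&g, malloc(16));
	gather_flush(&g);
	return 0;
}
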
batch            1300 mm/page_alloc.c 			if (prefetch_nr++ < pcp->batch)
batch            2799 mm/page_alloc.c 	int to_drain, batch;
batch            2802 mm/page_alloc.c 	batch = READ_ONCE(pcp->batch);
batch            2803 mm/page_alloc.c 	to_drain = min(pcp->count, batch);
batch            3056 mm/page_alloc.c 		unsigned long batch = READ_ONCE(pcp->batch);
batch            3057 mm/page_alloc.c 		free_pcppages_bulk(zone, batch, pcp);
batch            3223 mm/page_alloc.c 					pcp->batch, list,
batch            5738 mm/page_alloc.c static void setup_pageset(struct per_cpu_pageset *p, unsigned long batch);
batch            6037 mm/page_alloc.c 	int batch;
batch            6043 mm/page_alloc.c 	batch = zone_managed_pages(zone) / 1024;
batch            6045 mm/page_alloc.c 	if (batch * PAGE_SIZE > 1024 * 1024)
batch            6046 mm/page_alloc.c 		batch = (1024 * 1024) / PAGE_SIZE;
batch            6047 mm/page_alloc.c 	batch /= 4;		/* We effectively *= 4 below */
batch            6048 mm/page_alloc.c 	if (batch < 1)
batch            6049 mm/page_alloc.c 		batch = 1;
batch            6061 mm/page_alloc.c 	batch = rounddown_pow_of_two(batch + batch/2) - 1;
batch            6063 mm/page_alloc.c 	return batch;
batch            6097 mm/page_alloc.c 		unsigned long batch)
batch            6100 mm/page_alloc.c 	pcp->batch = 1;
batch            6107 mm/page_alloc.c 	pcp->batch = batch;
batch            6111 mm/page_alloc.c static void pageset_set_batch(struct per_cpu_pageset *p, unsigned long batch)
batch            6113 mm/page_alloc.c 	pageset_update(&p->pcp, 6 * batch, max(1UL, 1 * batch));
batch            6128 mm/page_alloc.c static void setup_pageset(struct per_cpu_pageset *p, unsigned long batch)
batch            6131 mm/page_alloc.c 	pageset_set_batch(p, batch);
batch            6141 mm/page_alloc.c 	unsigned long batch = max(1UL, high / 4);
batch            6143 mm/page_alloc.c 		batch = PAGE_SHIFT * 8;
batch            6145 mm/page_alloc.c 	pageset_update(&p->pcp, high, batch);
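
zone_batchsize() above picks the per-cpu pageset batch as roughly 1/4096 of the zone's managed pages, capped at 1MB worth of pages, and rounds the result to one less than a power of two; pageset_set_batch() then sets the high watermark to 6*batch. A worked standalone version of that arithmetic, assuming a 4KB page size:

#include <stdio.h>

#define PAGE_SIZE 4096UL

static unsigned long rounddown_pow_of_two(unsigned long n)
{
	unsigned long p = 1;

	while (p * 2 <= n)
		p *= 2;
	return p;
}

static int zone_batchsize(unsigned long managed_pages)
{
	unsigned long batch = managed_pages / 1024;

	if (batch * PAGE_SIZE > 1024 * 1024)      /* cap at 1MB worth of pages */
		batch = (1024 * 1024) / PAGE_SIZE;
	batch /= 4;          /* divided by 4 here; the high watermark is 6*batch later */
	if (batch < 1)
		batch = 1;
	return rounddown_pow_of_two(batch + batch / 2) - 1;
}

int main(void)
{
	unsigned long pages = 1024 * 1024;   /* e.g. a 4GB zone of 4K pages */

	printf("batch = %d, high = %d\n",
	       zone_batchsize(pages), 6 * zone_batchsize(pages));
	return 0;   /* prints batch = 63, high = 378 */
}
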
batch             468 mm/shmem.c     	unsigned long batch = sc ? sc->nr_to_scan : 128;
batch             498 mm/shmem.c     		if (!--batch)
batch             522 mm/slab.c      static void init_arraycache(struct array_cache *ac, int limit, int batch)
batch             527 mm/slab.c      		ac->batchcount = batch;
batch             634 mm/slab.c      						int batch, gfp_t gfp)
batch             642 mm/slab.c      		init_arraycache(&alc->ac, entries, batch);
batch             472 mm/vmscan.c    	long batch_size = shrinker->batch ? shrinker->batch
batch            1626 mm/vmstat.c    			   pageset->pcp.batch);
batch            2385 mm/zsmalloc.c  	pool->shrinker.batch = 0;
batch              64 net/core/netclassid_cgroup.c 	unsigned int batch;
batch              80 net/core/netclassid_cgroup.c 	if (--ctx->batch == 0) {
batch              81 net/core/netclassid_cgroup.c 		ctx->batch = UPDATE_CLASSID_BATCH;
batch              91 net/core/netclassid_cgroup.c 		.batch = UPDATE_CLASSID_BATCH
batch             174 tools/testing/radix-tree/test.c 		unsigned batch, xa_mark_t iftag, xa_mark_t thentag)
batch             180 tools/testing/radix-tree/test.c 	if (batch == 0)
batch             181 tools/testing/radix-tree/test.c 		batch = 1;
batch             186 tools/testing/radix-tree/test.c 		if (++tagged % batch)
batch              31 tools/testing/radix-tree/test.h 		unsigned batch, xa_mark_t iftag, xa_mark_t thentag);
batch             218 tools/testing/selftests/networking/timestamping/txtimestamp.c 	int batch = 0;
batch             259 tools/testing/selftests/networking/timestamping/txtimestamp.c 			batch++;
batch             263 tools/testing/selftests/networking/timestamping/txtimestamp.c 	if (batch > 1)
batch             264 tools/testing/selftests/networking/timestamping/txtimestamp.c 		fprintf(stderr, "batched %d timestamps\n", batch);
batch              22 tools/virtio/ringtest/main.c int batch = 1;
batch             116 tools/virtio/ringtest/main.c 	int tokick = batch;
batch             129 tools/virtio/ringtest/main.c 						tokick = batch;
batch             348 tools/virtio/ringtest/main.c 			batch = c;
batch             372 tools/virtio/ringtest/main.c 	if (batch > max_outstanding)
batch             373 tools/virtio/ringtest/main.c 		batch = max_outstanding;
batch             124 tools/virtio/ringtest/ptr_ring.c 		array.batch = param;
batch             669 tools/vm/page-types.c 	unsigned long batch;
batch             684 tools/vm/page-types.c 		batch = min_t(unsigned long, count, KPAGEFLAGS_BATCH);
batch             685 tools/vm/page-types.c 		pages = kpageflags_read(buf, index, batch);
batch             728 tools/vm/page-types.c 	unsigned long batch;
batch             734 tools/vm/page-types.c 		batch = min_t(unsigned long, count, PAGEMAP_BATCH);
batch             735 tools/vm/page-types.c 		pages = pagemap_read(buf, index, batch);
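
tools/vm/page-types above walks long page ranges in chunks of at most KPAGEFLAGS_BATCH or PAGEMAP_BATCH entries so a fixed-size buffer suffices. A minimal sketch of that chunked-read loop, with read_chunk() as a hypothetical stand-in for kpageflags_read()/pagemap_read():

#include <stdint.h>
#include <stdio.h>

#define KPAGEFLAGS_BATCH 1024UL

static unsigned long read_chunk(uint64_t *buf, unsigned long index,
				unsigned long pages)
{
	for (unsigned long i = 0; i < pages; i++)
		buf[i] = index + i;       /* fake data; a real reader does a pread() */
	return pages;
}

static void walk_range(unsigned long index, unsigned long count)
{
	uint64_t buf[KPAGEFLAGS_BATCH];

	while (count) {
		unsigned long batch = count < KPAGEFLAGS_BATCH ? count : KPAGEFLAGS_BATCH;
		unsigned long pages = read_chunk(buf, index, batch);

		if (!pages)
			break;
		/* ... consume buf[0..pages) here ... */
		index += pages;
		count -= pages;
	}
}

int main(void)
{
	walk_range(0, 5000);
	printf("done\n");
	return 0;
}
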