num_entries        90 arch/arc/mm/cache.c 		unsigned int pad:7, c:1, num_entries:8, num_cores:8, ver:8;
num_entries        92 arch/arc/mm/cache.c 		unsigned int ver:8, num_cores:8, num_entries:8, c:1, pad:7;
num_entries       699 arch/arm/probes/kprobes/test-core.c 	unsigned		num_entries;
num_entries       738 arch/arm/probes/kprobes/test-core.c 	struct coverage_entry *entry = coverage->base + coverage->num_entries;
num_entries       740 arch/arm/probes/kprobes/test-core.c 	if (coverage->num_entries == MAX_COVERAGE_ENTRIES - 1) {
num_entries       745 arch/arm/probes/kprobes/test-core.c 	++coverage->num_entries;
num_entries       769 arch/arm/probes/kprobes/test-core.c 	coverage.num_entries = 0;
num_entries       845 arch/arm/probes/kprobes/test-core.c 	struct coverage_entry *end = coverage.base + coverage.num_entries;
num_entries       892 arch/arm/probes/kprobes/test-core.c 	struct coverage_entry *end = coverage.base + coverage.num_entries;
num_entries      1577 arch/ia64/include/asm/pal.h 				num_entries	:	16,
num_entries      1588 arch/ia64/include/asm/pal.h #define tc_num_entries		pal_tc_info_s.num_entries
num_entries        62 arch/parisc/include/asm/pdc.h int pdc_pci_irt_size(unsigned long *num_entries, unsigned long hpa);
num_entries        63 arch/parisc/include/asm/pdc.h int pdc_pci_irt(unsigned long num_entries, unsigned long hpa, void *tbl);
num_entries       217 arch/parisc/include/asm/pdcpat.h extern int pdc_pat_get_irt_size(unsigned long *num_entries, unsigned long cell_num);
num_entries       222 arch/parisc/include/asm/pdcpat.h #define pdc_pat_get_irt_size(num_entries, cell_num)	PDC_BAD_PROC
num_entries       911 arch/parisc/kernel/firmware.c int pdc_pci_irt_size(unsigned long *num_entries, unsigned long hpa)
num_entries       920 arch/parisc/kernel/firmware.c 	*num_entries = pdc_result[0];
num_entries       935 arch/parisc/kernel/firmware.c int pdc_pci_irt(unsigned long num_entries, unsigned long hpa, void *tbl)
num_entries       943 arch/parisc/kernel/firmware.c 	pdc_result[0] = num_entries;
num_entries      1419 arch/parisc/kernel/firmware.c int pdc_pat_get_irt_size(unsigned long *num_entries, unsigned long cell_num)
num_entries      1427 arch/parisc/kernel/firmware.c 	*num_entries = pdc_result[0];
num_entries        18 arch/riscv/include/asm/module.h 	int num_entries;
num_entries        42 arch/riscv/include/asm/module.h 	for (i = 0; i < sec->num_entries; i++) {
num_entries        95 arch/riscv/include/asm/module.h 	for (i = 0; i < sec->num_entries; i++) {
num_entries        16 arch/riscv/kernel/module-sections.c 	int i = got_sec->num_entries;
num_entries        26 arch/riscv/kernel/module-sections.c 	got_sec->num_entries++;
num_entries        27 arch/riscv/kernel/module-sections.c 	BUG_ON(got_sec->num_entries > got_sec->max_entries);
num_entries        38 arch/riscv/kernel/module-sections.c 	int i = plt_sec->num_entries;
num_entries        51 arch/riscv/kernel/module-sections.c 	plt_sec->num_entries++;
num_entries        52 arch/riscv/kernel/module-sections.c 	got_plt_sec->num_entries++;
num_entries        53 arch/riscv/kernel/module-sections.c 	BUG_ON(plt_sec->num_entries > plt_sec->max_entries);
num_entries       142 arch/riscv/kernel/module-sections.c 	mod->arch.plt.num_entries = 0;
num_entries       149 arch/riscv/kernel/module-sections.c 	mod->arch.got.num_entries = 0;
num_entries       156 arch/riscv/kernel/module-sections.c 	mod->arch.got_plt.num_entries = 0;
num_entries      3136 arch/sparc/include/asm/hypervisor.h 				 unsigned long num_entries);
num_entries      3139 arch/sparc/include/asm/hypervisor.h 				 unsigned long *num_entries);
num_entries      3148 arch/sparc/include/asm/hypervisor.h 				 unsigned long num_entries);
num_entries      3151 arch/sparc/include/asm/hypervisor.h 				 unsigned long *num_entries);
num_entries      3160 arch/sparc/include/asm/hypervisor.h 				      unsigned long num_entries);
num_entries      3163 arch/sparc/include/asm/hypervisor.h 				      unsigned long *num_entries);
num_entries        36 arch/sparc/include/asm/iommu-common.h 				unsigned long num_entries,
num_entries       279 arch/sparc/include/asm/vio.h 	u32			num_entries;
num_entries       315 arch/sparc/include/asm/vio.h 	if (++index == dr->num_entries)
num_entries       323 arch/sparc/include/asm/vio.h 		return dr->num_entries - 1;
num_entries        53 arch/sparc/kernel/iommu-common.c 			 unsigned long num_entries,
num_entries        78 arch/sparc/kernel/iommu-common.c 		iommu->poolsize = num_entries/iommu->nr_pools;
num_entries        80 arch/sparc/kernel/iommu-common.c 		iommu->poolsize = (num_entries * 3 / 4)/iommu->nr_pools;
num_entries        94 arch/sparc/kernel/iommu-common.c 	p->end = num_entries;
num_entries       989 arch/sparc/kernel/irq_64.c 	unsigned long num_entries = (qmask + 1) / 64;
num_entries       992 arch/sparc/kernel/irq_64.c 	status = sun4v_cpu_qconf(type, paddr, num_entries);
num_entries       995 arch/sparc/kernel/irq_64.c 			    "err %lu\n", type, paddr, num_entries, status);
num_entries       201 arch/sparc/kernel/ldc.c static unsigned long __advance(unsigned long off, unsigned long num_entries)
num_entries       204 arch/sparc/kernel/ldc.c 	if (off == (num_entries * LDC_PACKET_SIZE))
num_entries       975 arch/sparc/kernel/ldc.c static int alloc_queue(const char *name, unsigned long num_entries,
num_entries       981 arch/sparc/kernel/ldc.c 	size = num_entries * LDC_PACKET_SIZE;
num_entries       999 arch/sparc/kernel/ldc.c static void free_queue(unsigned long num_entries, struct ldc_packet *q)
num_entries      1006 arch/sparc/kernel/ldc.c 	size = num_entries * LDC_PACKET_SIZE;
num_entries        35 arch/sparc/kernel/pci_sun4v.h 					 unsigned long num_entries);
num_entries        39 arch/sparc/kernel/pci_sun4v.h 				  unsigned long *num_entries);
num_entries       196 arch/sparc/kernel/viohs.c 	u.pkt.num_descr = dr->num_entries;
num_entries       462 arch/sparc/kernel/viohs.c 	dr->num_entries = pkt->num_descr;
num_entries       307 arch/x86/boot/compressed/acpi.c 	u32 num_entries, size, len;
num_entries       345 arch/x86/boot/compressed/acpi.c 	num_entries = (len - sizeof(struct acpi_table_header)) / size;
num_entries       348 arch/x86/boot/compressed/acpi.c 	while (num_entries--) {
num_entries        40 arch/x86/kernel/cpu/microcode/amd.c 	unsigned int num_entries;
num_entries        71 arch/x86/kernel/cpu/microcode/amd.c 	if (!et || !et->num_entries)
num_entries        74 arch/x86/kernel/cpu/microcode/amd.c 	for (i = 0; i < et->num_entries; i++) {
num_entries       310 arch/x86/kernel/cpu/microcode/amd.c 	table.num_entries = hdr[2] / sizeof(struct equiv_cpu_entry);
num_entries       737 arch/x86/kernel/cpu/microcode/amd.c 	equiv_table.num_entries = equiv_tbl_len / sizeof(struct equiv_cpu_entry);
num_entries        65 arch/x86/kernel/ldt.c static struct ldt_struct *alloc_ldt_struct(unsigned int num_entries)
num_entries        70 arch/x86/kernel/ldt.c 	if (num_entries > LDT_ENTRIES)
num_entries        78 arch/x86/kernel/ldt.c 	alloc_size = num_entries * LDT_ENTRY_SIZE;
num_entries        99 arch/x86/kernel/ldt.c 	new_ldt->nr_entries = num_entries;
num_entries        31 arch/x86/kernel/unwind_orc.c 				    unsigned int num_entries, unsigned long ip)
num_entries        34 arch/x86/kernel/unwind_orc.c 	int *last = ip_table + num_entries - 1;
num_entries        37 arch/x86/kernel/unwind_orc.c 	if (!num_entries)
num_entries       235 arch/x86/kernel/unwind_orc.c 	unsigned int num_entries = orc_ip_size / sizeof(int);
num_entries       239 arch/x86/kernel/unwind_orc.c 		     num_entries != orc_size / sizeof(*orc));
num_entries       249 arch/x86/kernel/unwind_orc.c 	sort(orc_ip, num_entries, sizeof(int), orc_sort_cmp, orc_sort_swap);
num_entries       254 arch/x86/kernel/unwind_orc.c 	mod->arch.num_orcs = num_entries;
num_entries       262 arch/x86/kernel/unwind_orc.c 	size_t num_entries = orc_ip_size / sizeof(int);
num_entries       266 arch/x86/kernel/unwind_orc.c 	if (!num_entries || orc_ip_size % sizeof(int) != 0 ||
num_entries       268 arch/x86/kernel/unwind_orc.c 	    num_entries != orc_size / sizeof(struct orc_entry)) {
num_entries       274 arch/x86/kernel/unwind_orc.c 	sort(__start_orc_unwind_ip, num_entries, sizeof(int), orc_sort_cmp,
num_entries       281 arch/x86/kernel/unwind_orc.c 				 num_entries,
num_entries       292 arch/x86/kernel/unwind_orc.c 	orc = __orc_find(__start_orc_unwind_ip, __start_orc_unwind, num_entries,
num_entries       839 arch/x86/kvm/cpuid.c 				 __u32 num_entries, unsigned int ioctl_type)
num_entries       855 arch/x86/kvm/cpuid.c 	for (i = 0; i < num_entries; i++) {
num_entries       249 arch/x86/platform/efi/efi_64.c 	unsigned num_entries;
num_entries       273 arch/x86/platform/efi/efi_64.c 	num_entries = pgd_index(EFI_VA_END) - pgd_index(PAGE_OFFSET);
num_entries       274 arch/x86/platform/efi/efi_64.c 	memcpy(pgd_efi, pgd_k, sizeof(pgd_t) * num_entries);
num_entries       288 arch/x86/platform/efi/efi_64.c 	num_entries = p4d_index(EFI_VA_END);
num_entries       289 arch/x86/platform/efi/efi_64.c 	memcpy(p4d_efi, p4d_k, sizeof(p4d_t) * num_entries);
num_entries       303 arch/x86/platform/efi/efi_64.c 	num_entries = pud_index(EFI_VA_END);
num_entries       304 arch/x86/platform/efi/efi_64.c 	memcpy(pud_efi, pud_k, sizeof(pud_t) * num_entries);
num_entries       309 arch/x86/platform/efi/efi_64.c 	num_entries = PTRS_PER_PUD - pud_index(EFI_VA_START);
num_entries       310 arch/x86/platform/efi/efi_64.c 	memcpy(pud_efi, pud_k, sizeof(pud_t) * num_entries);
num_entries       249 arch/x86/platform/efi/quirks.c 	int num_entries;
num_entries       271 arch/x86/platform/efi/quirks.c 	num_entries = efi_memmap_split_count(&md, &mr.range);
num_entries       272 arch/x86/platform/efi/quirks.c 	num_entries += efi.memmap.nr_map;
num_entries       274 arch/x86/platform/efi/quirks.c 	new_size = efi.memmap.desc_size * num_entries;
num_entries       276 arch/x86/platform/efi/quirks.c 	new_phys = efi_memmap_alloc(num_entries);
num_entries       291 arch/x86/platform/efi/quirks.c 	efi_memmap_install(new_phys, num_entries);
num_entries       409 arch/x86/platform/efi/quirks.c 	int num_entries = 0;
num_entries       419 arch/x86/platform/efi/quirks.c 			num_entries++;
num_entries       425 arch/x86/platform/efi/quirks.c 			num_entries++;
num_entries       459 arch/x86/platform/efi/quirks.c 	if (!num_entries)
num_entries       462 arch/x86/platform/efi/quirks.c 	new_size = efi.memmap.desc_size * num_entries;
num_entries       463 arch/x86/platform/efi/quirks.c 	new_phys = efi_memmap_alloc(num_entries);
num_entries       493 arch/x86/platform/efi/quirks.c 	if (efi_memmap_install(new_phys, num_entries)) {
num_entries       851 block/sed-opal.c 	int num_entries = 0;
num_entries       905 block/sed-opal.c 		num_entries++;
num_entries       908 block/sed-opal.c 	resp->num = num_entries;
num_entries       393 drivers/acpi/cppc_acpi.c 	if (pdomain->num_entries != ACPI_PSD_REV0_ENTRIES) {
num_entries       754 drivers/acpi/cppc_acpi.c 	cpc_ptr->num_entries = num_ent;
num_entries       877 drivers/acpi/cppc_acpi.c 	for (i = 2; i < cpc_ptr->num_entries; i++) {
num_entries       920 drivers/acpi/cppc_acpi.c 	for (i = 2; i < cpc_ptr->num_entries; i++) {
num_entries       548 drivers/acpi/processor_perflib.c 	if (pdomain->num_entries != ACPI_PSD_REV0_ENTRIES) {
num_entries       618 drivers/acpi/processor_throttling.c 	if (pdomain->num_entries != ACPI_TSD_REV0_ENTRIES) {
num_entries       883 drivers/atm/nicstar.c 	scq->num_entries = size / NS_SCQE_SIZE;
num_entries       886 drivers/atm/nicstar.c 	scq->last = scq->base + (scq->num_entries - 1);
num_entries       889 drivers/atm/nicstar.c 	scq->num_entries = size / NS_SCQE_SIZE;
num_entries       895 drivers/atm/nicstar.c 	for (i = 0; i < scq->num_entries; i++)
num_entries       906 drivers/atm/nicstar.c 	if (scq->num_entries == VBR_SCQ_NUM_ENTRIES)
num_entries       907 drivers/atm/nicstar.c 		for (i = 0; i < scq->num_entries; i++) {
num_entries       920 drivers/atm/nicstar.c 			for (i = 0; i < scq->num_entries; i++)
num_entries       923 drivers/atm/nicstar.c 			for (i = 0; i < scq->num_entries; i++) {
num_entries       934 drivers/atm/nicstar.c 			  2 * (scq->num_entries == VBR_SCQ_NUM_ENTRIES ?
num_entries      1532 drivers/atm/nicstar.c 		for (i = 0; i < scq->num_entries; i++) {
num_entries      1763 drivers/atm/nicstar.c 	if (scq->num_entries == VBR_SCQ_NUM_ENTRIES) {
num_entries      1913 drivers/atm/nicstar.c 	if (pos >= scq->num_entries) {
num_entries      1920 drivers/atm/nicstar.c 	if (++i == scq->num_entries)
num_entries      1939 drivers/atm/nicstar.c 		if (++i == scq->num_entries)
num_entries       669 drivers/atm/nicstar.h 	unsigned num_entries;
num_entries       732 drivers/block/sunvdc.c 	dr->num_entries = VDC_TX_RING_SIZE;
num_entries       746 drivers/block/sunvdc.c 				   (dr->entry_size * dr->num_entries),
num_entries       750 drivers/block/sunvdc.c 		dr->num_entries = 0;
num_entries        66 drivers/char/agp/agp.h 	int num_entries;
num_entries        73 drivers/char/agp/agp.h 	int num_entries;
num_entries        80 drivers/char/agp/agp.h 	int num_entries;
num_entries        87 drivers/char/agp/agp.h 	int num_entries;
num_entries        93 drivers/char/agp/agp.h 	int num_entries;
num_entries        88 drivers/char/agp/alpha-agp.c 	int num_entries, status;
num_entries        95 drivers/char/agp/alpha-agp.c 	num_entries = A_SIZE_FIX(temp)->num_entries;
num_entries        96 drivers/char/agp/alpha-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries       169 drivers/char/agp/alpha-agp.c 	aper_size->num_entries = agp->aperture.size / PAGE_SIZE;
num_entries       170 drivers/char/agp/alpha-agp.c 	aper_size->page_order = __ffs(aper_size->num_entries / 1024);
num_entries       138 drivers/char/agp/amd-k7-agp.c 	retval = amd_create_gatt_pages(value->num_entries / 1024);
num_entries       157 drivers/char/agp/amd-k7-agp.c 	for (i = 0; i < value->num_entries / 1024; i++, addr += 0x00400000) {
num_entries       163 drivers/char/agp/amd-k7-agp.c 	for (i = 0; i < value->num_entries; i++) {
num_entries       287 drivers/char/agp/amd-k7-agp.c 	int i, j, num_entries;
num_entries       291 drivers/char/agp/amd-k7-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       297 drivers/char/agp/amd-k7-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries        47 drivers/char/agp/amd64-agp.c 	int i, j, num_entries;
num_entries        53 drivers/char/agp/amd64-agp.c 	num_entries = agp_num_entries();
num_entries        64 drivers/char/agp/amd64-agp.c 	if (((unsigned long)pg_start + mem->page_count) > num_entries)
num_entries       271 drivers/char/agp/ati-agp.c 	int i, j, num_entries;
num_entries       276 drivers/char/agp/ati-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       285 drivers/char/agp/ati-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries       358 drivers/char/agp/ati-agp.c 	retval = ati_create_gatt_pages(value->num_entries / 1024);
num_entries       394 drivers/char/agp/ati-agp.c 	for (i = 0; i < value->num_entries / 1024; i++, addr += 0x00400000) {
num_entries       400 drivers/char/agp/ati-agp.c 	for (i = 0; i < value->num_entries; i++) {
num_entries       198 drivers/char/agp/efficeon-agp.c 	int num_entries, l1_pages;
num_entries       200 drivers/char/agp/efficeon-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       202 drivers/char/agp/efficeon-agp.c 	printk(KERN_DEBUG PFX "efficeon_create_gatt_table(%d)\n", num_entries);
num_entries       205 drivers/char/agp/efficeon-agp.c 	BUG_ON(num_entries & 0x3ff);
num_entries       206 drivers/char/agp/efficeon-agp.c 	l1_pages = num_entries >> 10;
num_entries       238 drivers/char/agp/efficeon-agp.c 	int i, count = mem->page_count, num_entries;
num_entries       245 drivers/char/agp/efficeon-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       246 drivers/char/agp/efficeon-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries       287 drivers/char/agp/efficeon-agp.c 	int i, count = mem->page_count, num_entries;
num_entries       291 drivers/char/agp/efficeon-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       293 drivers/char/agp/efficeon-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries       321 drivers/char/agp/generic.c 	int num_entries;
num_entries       328 drivers/char/agp/generic.c 		num_entries = A_SIZE_8(temp)->num_entries;
num_entries       331 drivers/char/agp/generic.c 		num_entries = A_SIZE_16(temp)->num_entries;
num_entries       334 drivers/char/agp/generic.c 		num_entries = A_SIZE_32(temp)->num_entries;
num_entries       337 drivers/char/agp/generic.c 		num_entries = A_SIZE_LVL2(temp)->num_entries;
num_entries       340 drivers/char/agp/generic.c 		num_entries = A_SIZE_FIX(temp)->num_entries;
num_entries       343 drivers/char/agp/generic.c 		num_entries = 0;
num_entries       347 drivers/char/agp/generic.c 	num_entries -= agp_memory_reserved>>PAGE_SHIFT;
num_entries       348 drivers/char/agp/generic.c 	if (num_entries<0)
num_entries       349 drivers/char/agp/generic.c 		num_entries = 0;
num_entries       350 drivers/char/agp/generic.c 	return num_entries;
num_entries       855 drivers/char/agp/generic.c 	int num_entries;
num_entries       867 drivers/char/agp/generic.c 	size = page_order = num_entries = 0;
num_entries       876 drivers/char/agp/generic.c 				num_entries =
num_entries       877 drivers/char/agp/generic.c 				    A_SIZE_8(temp)->num_entries;
num_entries       882 drivers/char/agp/generic.c 				num_entries = A_SIZE_16(temp)->num_entries;
num_entries       887 drivers/char/agp/generic.c 				num_entries = A_SIZE_32(temp)->num_entries;
num_entries       893 drivers/char/agp/generic.c 				size = page_order = num_entries = 0;
num_entries       925 drivers/char/agp/generic.c 		num_entries = ((struct aper_size_info_fixed *) temp)->num_entries;
num_entries       963 drivers/char/agp/generic.c 	for (i = 0; i < num_entries; i++) {
num_entries      1031 drivers/char/agp/generic.c 	int num_entries;
num_entries      1049 drivers/char/agp/generic.c 		num_entries = A_SIZE_8(temp)->num_entries;
num_entries      1052 drivers/char/agp/generic.c 		num_entries = A_SIZE_16(temp)->num_entries;
num_entries      1055 drivers/char/agp/generic.c 		num_entries = A_SIZE_32(temp)->num_entries;
num_entries      1058 drivers/char/agp/generic.c 		num_entries = A_SIZE_FIX(temp)->num_entries;
num_entries      1064 drivers/char/agp/generic.c 		num_entries = 0;
num_entries      1068 drivers/char/agp/generic.c 	num_entries -= agp_memory_reserved/PAGE_SIZE;
num_entries      1069 drivers/char/agp/generic.c 	if (num_entries < 0) num_entries = 0;
num_entries      1080 drivers/char/agp/generic.c 	if (((pg_start + mem->page_count) > num_entries) ||
num_entries      1115 drivers/char/agp/generic.c 	int mask_type, num_entries;
num_entries      1127 drivers/char/agp/generic.c 	num_entries = agp_num_entries();
num_entries      1128 drivers/char/agp/generic.c 	if (((pg_start + mem->page_count) > num_entries) ||
num_entries       153 drivers/char/agp/i460-agp.c 		values[i].num_entries = (values[i].size << 8) >> (I460_IO_PAGE_SHIFT - 12);
num_entries       154 drivers/char/agp/i460-agp.c 		values[i].page_order = ilog2((sizeof(u32)*values[i].num_entries) >> PAGE_SHIFT);
num_entries       234 drivers/char/agp/i460-agp.c 		size = current_size->num_entries * sizeof(i460.lp_desc[0]);
num_entries       244 drivers/char/agp/i460-agp.c 	int page_order, num_entries, i;
num_entries       252 drivers/char/agp/i460-agp.c 	num_entries = A_SIZE_8(temp)->num_entries;
num_entries       265 drivers/char/agp/i460-agp.c 	for (i = 0; i < num_entries; ++i)
num_entries       273 drivers/char/agp/i460-agp.c 	int num_entries, i;
num_entries       278 drivers/char/agp/i460-agp.c 	num_entries = A_SIZE_8(temp)->num_entries;
num_entries       280 drivers/char/agp/i460-agp.c 	for (i = 0; i < num_entries; ++i)
num_entries       282 drivers/char/agp/i460-agp.c 	WR_FLUSH_GATT(num_entries - 1);
num_entries       297 drivers/char/agp/i460-agp.c 	int i, j, k, num_entries;
num_entries       309 drivers/char/agp/i460-agp.c 	num_entries = A_SIZE_8(temp)->num_entries;
num_entries       311 drivers/char/agp/i460-agp.c 	if ((io_pg_start + I460_IOPAGES_PER_KPAGE * mem->page_count) > num_entries) {
num_entries       403 drivers/char/agp/i460-agp.c 	int i, start_offset, end_offset, idx, pg, num_entries;
num_entries       411 drivers/char/agp/i460-agp.c 	num_entries = A_SIZE_8(temp)->num_entries;
num_entries       419 drivers/char/agp/i460-agp.c 	if (end > i460.lp_desc + num_entries) {
num_entries       464 drivers/char/agp/i460-agp.c 	int i, pg, start_offset, end_offset, idx, num_entries;
num_entries       469 drivers/char/agp/i460-agp.c 	num_entries = A_SIZE_8(temp)->num_entries;
num_entries        99 drivers/char/agp/intel-gtt.c 				unsigned int num_entries,
num_entries       105 drivers/char/agp/intel-gtt.c 	DBG("try mapping %lu pages\n", (unsigned long)num_entries);
num_entries       107 drivers/char/agp/intel-gtt.c 	if (sg_alloc_table(st, num_entries, GFP_KERNEL))
num_entries       110 drivers/char/agp/intel-gtt.c 	for_each_sg(st->sgl, sg, num_entries, i)
num_entries       883 drivers/char/agp/intel-gtt.c 				   unsigned int num_entries,
num_entries       889 drivers/char/agp/intel-gtt.c 	for (i = 0, j = first_entry; i < num_entries; i++, j++) {
num_entries       949 drivers/char/agp/intel-gtt.c void intel_gtt_clear_range(unsigned int first_entry, unsigned int num_entries)
num_entries       953 drivers/char/agp/intel-gtt.c 	for (i = first_entry; i < (first_entry + num_entries); i++) {
num_entries       132 drivers/char/agp/nvidia-agp.c 	nvidia_private.num_active_entries = current_size->num_entries;
num_entries       162 drivers/char/agp/sworks-agp.c 	retval = serverworks_create_gatt_pages(value->num_entries / 1024);
num_entries       182 drivers/char/agp/sworks-agp.c 	for (i = 0; i < value->num_entries / 1024; i++)
num_entries       323 drivers/char/agp/sworks-agp.c 	int i, j, num_entries;
num_entries       327 drivers/char/agp/sworks-agp.c 	num_entries = A_SIZE_LVL2(agp_bridge->current_size)->num_entries;
num_entries       332 drivers/char/agp/sworks-agp.c 	if ((pg_start + mem->page_count) > num_entries) {
num_entries       151 drivers/char/agp/uninorth-agp.c 	int i, num_entries;
num_entries       169 drivers/char/agp/uninorth-agp.c 	num_entries = A_SIZE_32(temp)->num_entries;
num_entries       171 drivers/char/agp/uninorth-agp.c 	if ((pg_start + mem->page_count) > num_entries)
num_entries       374 drivers/char/agp/uninorth-agp.c 	int num_entries;
num_entries       386 drivers/char/agp/uninorth-agp.c 	size = page_order = num_entries = 0;
num_entries       391 drivers/char/agp/uninorth-agp.c 		num_entries = A_SIZE_32(temp)->num_entries;
num_entries       436 drivers/char/agp/uninorth-agp.c 	for (i = 0; i < num_entries; i++)
num_entries       189 drivers/crypto/n2_core.c static void *spu_queue_alloc(struct spu_queue *q, int num_entries)
num_entries       193 drivers/crypto/n2_core.c 	if (avail >= num_entries)
num_entries       214 drivers/crypto/n2_core.h 				     unsigned long num_entries,
num_entries       219 drivers/crypto/n2_core.h 				     unsigned long *num_entries);
num_entries        99 drivers/crypto/qat/qat_common/adf_accel_devices.h 	u32 num_entries;
num_entries       219 drivers/crypto/qat/qat_common/adf_isr.c 	if (pci_dev_info->msix_entries.num_entries > 1) {
num_entries       256 drivers/crypto/qat/qat_common/adf_isr.c 	accel_dev->accel_pci_dev.msix_entries.num_entries = msix_num_entries;
num_entries       274 drivers/crypto/qat/qat_common/adf_isr.c 	for (i = 0; i < accel_dev->accel_pci_dev.msix_entries.num_entries; i++)
num_entries        41 drivers/firmware/efi/memattr.c 	tbl_size = sizeof(*tbl) + tbl->num_entries * tbl->desc_size;
num_entries       160 drivers/firmware/efi/memattr.c 	for (i = ret = 0; ret == 0 && i < tbl->num_entries; i++) {
num_entries        42 drivers/firmware/efi/memmap.c phys_addr_t __init efi_memmap_alloc(unsigned int num_entries)
num_entries        44 drivers/firmware/efi/memmap.c 	unsigned long size = num_entries * efi.memmap.desc_size;
num_entries      1572 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 	u8 frev, crev, num_entries, t_mem_id, num_ranges = 0;
num_entries      1599 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 					num_entries = (u8)((le16_to_cpu(reg_block->usRegIndexTblSize)) /
num_entries      1601 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 					if (num_entries > VBIOS_MC_REGISTER_ARRAY_SIZE)
num_entries      1603 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 					while (i < num_entries) {
num_entries      1640 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.c 					reg_table->num_entries = num_ranges;
num_entries        94 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.h 	u8 num_entries;
num_entries       114 drivers/gpu/drm/amd/amdgpu/amdgpu_atombios.h 	u8 num_entries;
num_entries        64 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 			  unsigned num_entries, struct amdgpu_bo_list **result)
num_entries        66 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	unsigned last_entry = 0, first_userptr = num_entries;
num_entries        74 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	if (num_entries > (SIZE_MAX - sizeof(struct amdgpu_bo_list))
num_entries        79 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	size += num_entries * sizeof(struct amdgpu_bo_list_entry);
num_entries        90 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	memset(array, 0, num_entries * sizeof(struct amdgpu_bo_list_entry));
num_entries        92 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	for (i = 0; i < num_entries; ++i) {
num_entries       135 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	list->num_entries = num_entries;
num_entries       137 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	trace_amdgpu_cs_bo_status(list->num_entries, total_size);
num_entries       148 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.c 	for (i = first_userptr; i < num_entries; ++i) {
num_entries        49 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h 	unsigned num_entries;
num_entries        63 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h 				 unsigned num_entries,
num_entries        76 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h 	     e != amdgpu_bo_list_array_entry(list, (list)->num_entries); \
num_entries        81 drivers/gpu/drm/amd/amdgpu/amdgpu_bo_list.h 	     e != amdgpu_bo_list_array_entry(list, (list)->num_entries); \
num_entries       606 drivers/gpu/drm/amd/amdgpu/amdgpu_cs.c 	if (p->bo_list->first_userptr != p->bo_list->num_entries)
num_entries       416 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	unsigned shift, num_entries;
num_entries       424 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	num_entries = amdgpu_vm_num_entries(adev, cursor->level - 1);
num_entries       426 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 	if (cursor->entry == &cursor->parent->entries[num_entries - 1])
num_entries       861 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		unsigned num_entries;
num_entries       863 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		num_entries = amdgpu_vm_num_entries(adev, cursor->level);
num_entries       864 drivers/gpu/drm/amd/amdgpu/amdgpu_vm.c 		entry->entries = kvmalloc_array(num_entries,
num_entries        85 drivers/gpu/drm/amd/amdgpu/kv_dpm.c 		for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries        89 drivers/gpu/drm/amd/amdgpu/kv_dpm.c 		return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
num_entries       108 drivers/gpu/drm/amd/amdgpu/kv_dpm.c 		for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries       113 drivers/gpu/drm/amd/amdgpu/kv_dpm.c 		return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
num_entries       189 drivers/gpu/drm/amd/amdgpu/kv_dpm.c 	vid_mapping_table->num_entries = i;
num_entries        43 drivers/gpu/drm/amd/amdgpu/kv_dpm.h 	u32 num_entries;
num_entries      5830 drivers/gpu/drm/amd/amdgpu/si_dpm.c 			for (k = 0; k < table->num_entries; k++)
num_entries      5841 drivers/gpu/drm/amd/amdgpu/si_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      5855 drivers/gpu/drm/amd/amdgpu/si_dpm.c 				for (k = 0; k < table->num_entries; k++)
num_entries      5865 drivers/gpu/drm/amd/amdgpu/si_dpm.c 			for(k = 0; k < table->num_entries; k++)
num_entries      5940 drivers/gpu/drm/amd/amdgpu/si_dpm.c 		for (j = 1; j < table->num_entries; j++) {
num_entries      5967 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
num_entries      5974 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      5982 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	si_table->num_entries = table->num_entries;
num_entries      6059 drivers/gpu/drm/amd/amdgpu/si_dpm.c 				    u32 num_entries, u32 valid_flag)
num_entries      6063 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	for(i = 0, j = 0; j < num_entries; j++) {
num_entries      6078 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	for (i = 0; i < si_pi->mc_reg_table.num_entries; i++) {
num_entries      6083 drivers/gpu/drm/amd/amdgpu/si_dpm.c 	if ((i == si_pi->mc_reg_table.num_entries) && (i > 0))
num_entries       276 drivers/gpu/drm/amd/amdgpu/si_dpm.h 	u8 num_entries;
num_entries       622 drivers/gpu/drm/amd/amdgpu/si_dpm.h 	u8 num_entries;
num_entries       930 drivers/gpu/drm/amd/amdgpu/si_dpm.h 	u8 num_entries;
num_entries       880 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 				uint32_t *num_entries,
num_entries       892 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 	*num_entries = 0;
num_entries       915 drivers/gpu/drm/amd/amdkfd/kfd_crat.c 		(*num_entries)++;
num_entries       207 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c 	gamma->num_entries = lut_size;
num_entries       230 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c 	gamma->num_entries = lut_size;
num_entries       268 drivers/gpu/drm/amd/display/amdgpu_dm/amdgpu_dm_color.c 	gamma->num_entries = lut_size;
num_entries       381 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 		.num_entries = 4,
num_entries       487 drivers/gpu/drm/amd/display/dc/clk_mgr/dcn21/rn_clk_mgr.c 	bw_params->clk_table.num_entries = i;
num_entries      1749 drivers/gpu/drm/amd/display/dc/core/dc.c 		surface->gamma_correction->num_entries =
num_entries      1750 drivers/gpu/drm/amd/display/dc/core/dc.c 			srf_update->gamma->num_entries;
num_entries       468 drivers/gpu/drm/amd/display/dc/dc_hw_types.h 	unsigned int num_entries;
num_entries       198 drivers/gpu/drm/amd/display/dc/dce/dce_ipp.c 	for (i = 0; i < gamma->num_entries; i++) {
num_entries       805 drivers/gpu/drm/amd/display/dc/dcn10/dcn10_dpp_cm.c 	for (i = 0; i < gamma->num_entries; i++) {
num_entries      1032 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	vlevel_max = bw_params->clk_table.num_entries - 1;
num_entries      1285 drivers/gpu/drm/amd/display/dc/dcn21/dcn21_resource.c 	for (i = 0; i < clk_table->num_entries; i++) {
num_entries        65 drivers/gpu/drm/amd/display/dc/inc/hw/clk_mgr.h 	unsigned int num_entries;
num_entries       464 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	const uint32_t max_number = ramp->num_entries + 3;
num_entries       571 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		if (index_left >= ramp->num_entries + 3) {
num_entries       576 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		if (index_right >= ramp->num_entries + 3) {
num_entries      1082 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	struct pwl_float_data *rgb_last = rgb + ramp->num_entries - 1;
num_entries      1094 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	} while (i != ramp->num_entries);
num_entries      1108 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	} while (i != ramp->num_entries);
num_entries      1147 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	for (i = 0 ; i < ramp->num_entries; i++) {
num_entries      1172 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	for (i = 0 ; i < ramp->num_entries; i++) {
num_entries      1432 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 	max_entries += ramp->num_entries;
num_entries      1669 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		rgb_user = kvcalloc(ramp->num_entries + _EXTRA_POINTS,
num_entries      1675 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		axis_x = kvcalloc(ramp->num_entries + 3, sizeof(*axis_x),
num_entries      1686 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 				ramp->num_entries,
num_entries      1898 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		rgb_user = kvcalloc(ramp->num_entries + _EXTRA_POINTS,
num_entries      1904 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 		axis_x = kvcalloc(ramp->num_entries + _EXTRA_POINTS, sizeof(*axis_x),
num_entries      1915 drivers/gpu/drm/amd/display/modules/color/color_gamma.c 				ramp->num_entries,
num_entries       211 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	unsigned int num_entries = NUM_BL_CURVE_SEGS;
num_entries       216 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	table->backlight_thresholds[num_entries-1] = 0xFFFF;
num_entries       217 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	table->backlight_offsets[num_entries-1] =
num_entries       228 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	for (i = 1; i+1 < num_entries; i++) {
num_entries       229 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 		lut_index = (params.backlight_lut_array_size - 1) * i / (num_entries - 1);
num_entries       233 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 			cpu_to_be16(DIV_ROUNDUP((i * 65536), num_entries));
num_entries       243 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	unsigned int num_entries = NUM_BL_CURVE_SEGS;
num_entries       248 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	table->backlight_thresholds[num_entries-1] = 0xFFFF;
num_entries       249 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	table->backlight_offsets[num_entries-1] =
num_entries       260 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 	for (i = 1; i+1 < num_entries; i++) {
num_entries       261 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 		lut_index = (params.backlight_lut_array_size - 1) * i / (num_entries - 1);
num_entries       265 drivers/gpu/drm/amd/display/modules/power/power_helpers.c 			cpu_to_be16(DIV_ROUNDUP((i * 65536), num_entries));
num_entries        89 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c 	table->num_entries = num_ranges;
num_entries       106 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c 	uint8_t num_entries = (uint8_t)((le16_to_cpu(reg_block->usRegIndexTblSize))
num_entries       110 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c 	num_entries--;        /* subtract 1 data end mark entry */
num_entries       112 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c 	PP_ASSERT_WITH_CODE((num_entries <= VBIOS_MC_REGISTER_ARRAY_SIZE),
num_entries       117 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.c 			(i < num_entries)) {
num_entries       193 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.h 	uint8_t   num_entries;
num_entries       249 drivers/gpu/drm/amd/powerplay/hwmgr/ppatomctrl.h 	uint8_t                         num_entries;             /* number of AC timing entries */
num_entries       751 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 	uint8_t num_entries;
num_entries       772 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 		num_entries = clk_dep_table->ucNumEntries + 1 > NUM_DSPCLK_LEVELS ?
num_entries       775 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 		num_entries = clk_dep_table->ucNumEntries;
num_entries       780 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 			num_entries;
num_entries       787 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 	clk_table->count = (uint32_t)num_entries;
num_entries       796 drivers/gpu/drm/amd/powerplay/hwmgr/vega10_processpptables.c 	if (i < num_entries) {
num_entries        97 drivers/gpu/drm/amd/powerplay/inc/smu_ucode_xfer_vi.h 	uint32_t num_entries;
num_entries      1748 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	uint32_t num_entries, uint32_t valid_flag)
num_entries      1752 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries      1769 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	for (i = 0; i < smu_data->mc_reg_table.num_entries; i++) {
num_entries      1776 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	if ((i == smu_data->mc_reg_table.num_entries) && (i > 0))
num_entries      2558 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	PP_ASSERT_WITH_CODE((table->num_entries <= MAX_AC_TIMING_ENTRIES),
num_entries      2566 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      2575 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 	ni_table->num_entries = table->num_entries;
num_entries      2597 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2609 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2624 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      2637 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2661 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.c 		for (j = 1; j < table->num_entries; j++) {
num_entries        55 drivers/gpu/drm/amd/powerplay/smumgr/ci_smumgr.h 	uint8_t   num_entries;
num_entries      1716 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	uint32_t num_entries, uint32_t valid_flag)
num_entries      1720 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries      1736 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	for (i = 0; i < smu_data->mc_reg_table.num_entries; i++) {
num_entries      1743 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	if ((i == smu_data->mc_reg_table.num_entries) && (i > 0))
num_entries      2487 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	PP_ASSERT_WITH_CODE((table->num_entries <= MAX_AC_TIMING_ENTRIES),
num_entries      2495 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      2504 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 	ni_table->num_entries = table->num_entries;
num_entries      2526 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2538 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2554 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      2567 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      2590 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.c 		for (j = 1; j < table->num_entries; j++) {
num_entries        54 drivers/gpu/drm/amd/powerplay/smumgr/iceland_smumgr.h 	uint8_t   num_entries;        /* number of entries in mc_reg_table_entry used*/
num_entries       389 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 		toc->num_entries = 0;
num_entries       393 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_RLC_G, &toc->entry[toc->num_entries++]),
num_entries       396 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_CE, &toc->entry[toc->num_entries++]),
num_entries       399 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_PFP, &toc->entry[toc->num_entries++]),
num_entries       402 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_ME, &toc->entry[toc->num_entries++]),
num_entries       405 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_MEC, &toc->entry[toc->num_entries++]),
num_entries       408 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_MEC_JT1, &toc->entry[toc->num_entries++]),
num_entries       411 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_CP_MEC_JT2, &toc->entry[toc->num_entries++]),
num_entries       414 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_SDMA0, &toc->entry[toc->num_entries++]),
num_entries       417 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_SDMA1, &toc->entry[toc->num_entries++]),
num_entries       421 drivers/gpu/drm/amd/powerplay/smumgr/smu7_smumgr.c 				UCODE_ID_MEC_STORAGE, &toc->entry[toc->num_entries++]),
num_entries      2094 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	uint32_t num_entries, uint32_t valid_flag)
num_entries      2098 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries      2115 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	for (i = 0; i < smu_data->mc_reg_table.num_entries; i++) {
num_entries      2122 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	if ((i == smu_data->mc_reg_table.num_entries) && (i > 0))
num_entries      2950 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	PP_ASSERT_WITH_CODE((table->num_entries <= MAX_AC_TIMING_ENTRIES),
num_entries      2958 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      2967 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 	ni_table->num_entries = table->num_entries;
num_entries      2990 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      3002 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      3017 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 				for (k = 0; k < table->num_entries; k++)
num_entries      3029 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      3053 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.c 		for (j = 1; j < table->num_entries; j++) {
num_entries        56 drivers/gpu/drm/amd/powerplay/smumgr/tonga_smumgr.h 	uint8_t   num_entries;        /* number of entries in mc_reg_table_entry used*/
num_entries       736 drivers/gpu/drm/bridge/sil-sii8620.c 	d->num_entries = 1;
num_entries      1741 drivers/gpu/drm/bridge/sil-sii8620.c 	d->num_entries = 1;
num_entries      1253 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		const u8 num_entries = execlists->csb_size;
num_entries      1260 drivers/gpu/drm/i915/gt/intel_engine_cs.c 			   num_entries);
num_entries      1270 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		if (read >= num_entries)
num_entries      1272 drivers/gpu/drm/i915/gt/intel_engine_cs.c 		if (write >= num_entries)
num_entries      1275 drivers/gpu/drm/i915/gt/intel_engine_cs.c 			write += num_entries;
num_entries      1277 drivers/gpu/drm/i915/gt/intel_engine_cs.c 			idx = ++read % num_entries;
num_entries      1490 drivers/gpu/drm/i915/gt/intel_lrc.c 	const u8 num_entries = execlists->csb_size;
num_entries      1524 drivers/gpu/drm/i915/gt/intel_lrc.c 		if (++head == num_entries)
num_entries      1615 drivers/gpu/drm/i915/gt/intel_lrc.c 	invalidate_csb_entries(&buf[0], &buf[num_entries - 1]);
num_entries      2787 drivers/gpu/drm/i915/gvt/gtt.c 	u32 num_entries;
num_entries      2793 drivers/gpu/drm/i915/gvt/gtt.c 	num_entries = vgpu_aperture_sz(vgpu) >> PAGE_SHIFT;
num_entries      2794 drivers/gpu/drm/i915/gvt/gtt.c 	while (num_entries--) {
num_entries      2803 drivers/gpu/drm/i915/gvt/gtt.c 	num_entries = vgpu_hidden_sz(vgpu) >> PAGE_SHIFT;
num_entries      2804 drivers/gpu/drm/i915/gvt/gtt.c 	while (num_entries--) {
num_entries      1612 drivers/gpu/drm/i915/i915_gem_gtt.c 	unsigned int num_entries = length / I915_GTT_PAGE_SIZE;
num_entries      1614 drivers/gpu/drm/i915/i915_gem_gtt.c 	while (num_entries) {
num_entries      1617 drivers/gpu/drm/i915/i915_gem_gtt.c 		const unsigned int count = min(num_entries, GEN6_PTES - pte);
num_entries      1622 drivers/gpu/drm/i915/i915_gem_gtt.c 		num_entries -= count;
num_entries      2268 drivers/gpu/drm/i915/i915_gem_gtt.c 	unsigned num_entries = length / I915_GTT_PAGE_SIZE;
num_entries      2275 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (WARN(num_entries > max_entries,
num_entries      2277 drivers/gpu/drm/i915/i915_gem_gtt.c 		 first_entry, num_entries, max_entries))
num_entries      2278 drivers/gpu/drm/i915/i915_gem_gtt.c 		num_entries = max_entries;
num_entries      2280 drivers/gpu/drm/i915/i915_gem_gtt.c 	for (i = 0; i < num_entries; i++)
num_entries      2383 drivers/gpu/drm/i915/i915_gem_gtt.c 	unsigned num_entries = length / I915_GTT_PAGE_SIZE;
num_entries      2389 drivers/gpu/drm/i915/i915_gem_gtt.c 	if (WARN(num_entries > max_entries,
num_entries      2391 drivers/gpu/drm/i915/i915_gem_gtt.c 		 first_entry, num_entries, max_entries))
num_entries      2392 drivers/gpu/drm/i915/i915_gem_gtt.c 		num_entries = max_entries;
num_entries      2395 drivers/gpu/drm/i915/i915_gem_gtt.c 	for (i = 0; i < num_entries; i++)
num_entries       153 drivers/gpu/drm/i915/intel_csr.c 	u32 num_entries;
num_entries       335 drivers/gpu/drm/i915/intel_csr.c 			      unsigned int num_entries,
num_entries       342 drivers/gpu/drm/i915/intel_csr.c 	for (i = 0; i < num_entries; i++) {
num_entries       487 drivers/gpu/drm/i915/intel_csr.c 	u32 num_entries, max_entries, dmc_offset;
num_entries       517 drivers/gpu/drm/i915/intel_csr.c 	num_entries = package_header->num_entries;
num_entries       518 drivers/gpu/drm/i915/intel_csr.c 	if (WARN_ON(package_header->num_entries > max_entries))
num_entries       519 drivers/gpu/drm/i915/intel_csr.c 		num_entries = max_entries;
num_entries       523 drivers/gpu/drm/i915/intel_csr.c 	dmc_offset = find_dmc_fw_offset(fw_info, num_entries, si,
num_entries      5253 drivers/gpu/drm/i915/intel_pm.c 				 int num_entries, int ignore_idx)
num_entries      5257 drivers/gpu/drm/i915/intel_pm.c 	for (i = 0; i < num_entries; i++) {
num_entries        65 drivers/gpu/drm/i915/intel_pm.h 				 int num_entries, int ignore_idx);
num_entries       705 drivers/gpu/drm/nouveau/nouveau_bios.c 	uint8_t version, headerlen, entrylen, num_entries;
num_entries       729 drivers/gpu/drm/nouveau/nouveau_bios.c 	num_entries = bios->data[load_table_ptr + 3];
num_entries       731 drivers/gpu/drm/nouveau/nouveau_bios.c 	if (headerlen != 4 || entrylen != 4 || num_entries != 2) {
num_entries      1907 drivers/gpu/drm/radeon/btc_dpm.c 		for (j = 1; j < table->num_entries; j++) {
num_entries      1930 drivers/gpu/drm/radeon/btc_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      1943 drivers/gpu/drm/radeon/btc_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      1959 drivers/gpu/drm/radeon/btc_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      1999 drivers/gpu/drm/radeon/btc_dpm.c 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
num_entries      2006 drivers/gpu/drm/radeon/btc_dpm.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      2013 drivers/gpu/drm/radeon/btc_dpm.c 	eg_table->num_entries = table->num_entries;
num_entries      4351 drivers/gpu/drm/radeon/ci_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      4362 drivers/gpu/drm/radeon/ci_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      4375 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4388 drivers/gpu/drm/radeon/ci_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      4485 drivers/gpu/drm/radeon/ci_dpm.c 		for (j = 1; j < table->num_entries; j++) {
num_entries      4514 drivers/gpu/drm/radeon/ci_dpm.c 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
num_entries      4522 drivers/gpu/drm/radeon/ci_dpm.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      4529 drivers/gpu/drm/radeon/ci_dpm.c 	ci_table->num_entries = table->num_entries;
num_entries      4552 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4561 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4570 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4579 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4586 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4598 drivers/gpu/drm/radeon/ci_dpm.c 				for (k = 0; k < table->num_entries; k++) {
num_entries      4706 drivers/gpu/drm/radeon/ci_dpm.c 				    u32 num_entries, u32 valid_flag)
num_entries      4710 drivers/gpu/drm/radeon/ci_dpm.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries      4725 drivers/gpu/drm/radeon/ci_dpm.c 	for(i = 0; i < pi->mc_reg_table.num_entries; i++) {
num_entries      4730 drivers/gpu/drm/radeon/ci_dpm.c 	if ((i == pi->mc_reg_table.num_entries) && (i > 0))
num_entries        84 drivers/gpu/drm/radeon/ci_dpm.h 	u8 num_entries;
num_entries       816 drivers/gpu/drm/radeon/cypress_dpm.c 					 u32 num_entries, u32 valid_flag)
num_entries       820 drivers/gpu/drm/radeon/cypress_dpm.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries       835 drivers/gpu/drm/radeon/cypress_dpm.c 	for (i = 0; i < eg_pi->mc_reg_table.num_entries; i++) {
num_entries       841 drivers/gpu/drm/radeon/cypress_dpm.c 	if ((i == eg_pi->mc_reg_table.num_entries) && (i > 0))
num_entries      1049 drivers/gpu/drm/radeon/cypress_dpm.c 	for (i = 0; i < range_table->num_entries; i++) {
num_entries      1057 drivers/gpu/drm/radeon/cypress_dpm.c 	eg_pi->mc_reg_table.num_entries = range_table->num_entries;
num_entries      1061 drivers/gpu/drm/radeon/cypress_dpm.c 		for (j = 1; j < range_table->num_entries; j++) {
num_entries        36 drivers/gpu/drm/radeon/cypress_dpm.h 	u8 num_entries;
num_entries       567 drivers/gpu/drm/radeon/kv_dpm.c 		for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries       571 drivers/gpu/drm/radeon/kv_dpm.c 		return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
num_entries       590 drivers/gpu/drm/radeon/kv_dpm.c 		for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries       595 drivers/gpu/drm/radeon/kv_dpm.c 		return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
num_entries      2724 drivers/gpu/drm/radeon/ni_dpm.c 			for (k = 0; k < table->num_entries; k++)
num_entries      2735 drivers/gpu/drm/radeon/ni_dpm.c 			for(k = 0; k < table->num_entries; k++) {
num_entries      2750 drivers/gpu/drm/radeon/ni_dpm.c 			for (k = 0; k < table->num_entries; k++)
num_entries      2825 drivers/gpu/drm/radeon/ni_dpm.c 		for (j = 1; j < table->num_entries; j++) {
num_entries      2852 drivers/gpu/drm/radeon/ni_dpm.c 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
num_entries      2859 drivers/gpu/drm/radeon/ni_dpm.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      2866 drivers/gpu/drm/radeon/ni_dpm.c 	ni_table->num_entries = table->num_entries;
num_entries      2945 drivers/gpu/drm/radeon/ni_dpm.c 				    u32 num_entries, u32 valid_flag)
num_entries      2949 drivers/gpu/drm/radeon/ni_dpm.c 	for (i = 0, j = 0; j < num_entries; j++) {
num_entries      2964 drivers/gpu/drm/radeon/ni_dpm.c 	for (i = 0; i < ni_pi->mc_reg_table.num_entries; i++) {
num_entries      2969 drivers/gpu/drm/radeon/ni_dpm.c 	if ((i == ni_pi->mc_reg_table.num_entries) && (i > 0))
num_entries        54 drivers/gpu/drm/radeon/ni_dpm.h 	u8 num_entries;
num_entries      3948 drivers/gpu/drm/radeon/radeon_atombios.c 					mclk_range_table->num_entries = (u8)
num_entries      3952 drivers/gpu/drm/radeon/radeon_atombios.c 					for (i = 0; i < mclk_range_table->num_entries; i++) {
num_entries      3990 drivers/gpu/drm/radeon/radeon_atombios.c 	u8 frev, crev, num_entries, t_mem_id, num_ranges = 0;
num_entries      4017 drivers/gpu/drm/radeon/radeon_atombios.c 					num_entries = (u8)((le16_to_cpu(reg_block->usRegIndexTblSize)) /
num_entries      4019 drivers/gpu/drm/radeon/radeon_atombios.c 					if (num_entries > VBIOS_MC_REGISTER_ARRAY_SIZE)
num_entries      4021 drivers/gpu/drm/radeon/radeon_atombios.c 					while (i < num_entries) {
num_entries      4058 drivers/gpu/drm/radeon/radeon_atombios.c 					reg_table->num_entries = num_ranges;
num_entries       646 drivers/gpu/drm/radeon/radeon_mode.h 	u8 num_entries;
num_entries       666 drivers/gpu/drm/radeon/radeon_mode.h 	u8 num_entries;
num_entries      5371 drivers/gpu/drm/radeon/si_dpm.c 			for (k = 0; k < table->num_entries; k++)
num_entries      5382 drivers/gpu/drm/radeon/si_dpm.c 			for (k = 0; k < table->num_entries; k++) {
num_entries      5396 drivers/gpu/drm/radeon/si_dpm.c 				for (k = 0; k < table->num_entries; k++)
num_entries      5408 drivers/gpu/drm/radeon/si_dpm.c 			for(k = 0; k < table->num_entries; k++)
num_entries      5486 drivers/gpu/drm/radeon/si_dpm.c 		for (j = 1; j < table->num_entries; j++) {
num_entries      5513 drivers/gpu/drm/radeon/si_dpm.c 	if (table->num_entries > MAX_AC_TIMING_ENTRIES)
num_entries      5520 drivers/gpu/drm/radeon/si_dpm.c 	for (i = 0; i < table->num_entries; i++) {
num_entries      5528 drivers/gpu/drm/radeon/si_dpm.c 	si_table->num_entries = table->num_entries;
num_entries      5605 drivers/gpu/drm/radeon/si_dpm.c 				    u32 num_entries, u32 valid_flag)
num_entries      5609 drivers/gpu/drm/radeon/si_dpm.c 	for(i = 0, j = 0; j < num_entries; j++) {
num_entries      5624 drivers/gpu/drm/radeon/si_dpm.c 	for (i = 0; i < si_pi->mc_reg_table.num_entries; i++) {
num_entries      5629 drivers/gpu/drm/radeon/si_dpm.c 	if ((i == si_pi->mc_reg_table.num_entries) && (i > 0))
num_entries       114 drivers/gpu/drm/radeon/si_dpm.h 	u8 num_entries;
num_entries      1534 drivers/gpu/drm/radeon/sumo_dpm.c 	for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries      1539 drivers/gpu/drm/radeon/sumo_dpm.c 	return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_7bit;
num_entries      1549 drivers/gpu/drm/radeon/sumo_dpm.c 	for (i = 0; i < vid_mapping_table->num_entries; i++) {
num_entries      1554 drivers/gpu/drm/radeon/sumo_dpm.c 	return vid_mapping_table->entries[vid_mapping_table->num_entries - 1].vid_2bit;
num_entries      1645 drivers/gpu/drm/radeon/sumo_dpm.c 	vid_mapping_table->num_entries = i;
num_entries        66 drivers/gpu/drm/radeon/sumo_dpm.h 	u32 num_entries;
num_entries       575 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	u32 num_entries;
num_entries       599 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	num_entries = PAGE_SIZE / co_info[type].size;
num_entries       600 drivers/gpu/drm/vmwgfx/vmwgfx_cotable.c 	if (num_entries < co_info[type].min_initial_entries) {
num_entries      3114 drivers/infiniband/hw/bnxt_re/ib_verbs.c int bnxt_re_poll_cq(struct ib_cq *ib_cq, int num_entries, struct ib_wc *wc)
num_entries      3127 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	budget = min_t(u32, num_entries, cq->max_cql);
num_entries      3128 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	num_entries = budget;
num_entries      3243 drivers/infiniband/hw/bnxt_re/ib_verbs.c 	return num_entries - budget;
num_entries       196 drivers/infiniband/hw/bnxt_re/ib_verbs.h int bnxt_re_poll_cq(struct ib_cq *cq, int num_entries, struct ib_wc *wc);
num_entries       198 drivers/infiniband/hw/cxgb3/iwch_cq.c int iwch_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       210 drivers/infiniband/hw/cxgb3/iwch_cq.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries       333 drivers/infiniband/hw/cxgb3/iwch_provider.h int iwch_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries       949 drivers/infiniband/hw/cxgb4/cq.c int c4iw_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       959 drivers/infiniband/hw/cxgb4/cq.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries       970 drivers/infiniband/hw/cxgb4/iw_cxgb4.h int c4iw_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries       974 drivers/infiniband/hw/hns/hns_roce_device.h 	int (*poll_cq)(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries      2387 drivers/infiniband/hw/hns/hns_roce_hw_v1.c int hns_roce_v1_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries      2397 drivers/infiniband/hw/hns/hns_roce_hw_v1.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries      1096 drivers/infiniband/hw/hns/hns_roce_hw_v1.h int hns_roce_v1_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries      2971 drivers/infiniband/hw/hns/hns_roce_hw_v2.c static int hns_roce_v2_poll_cq(struct ib_cq *ibcq, int num_entries,
num_entries      2981 drivers/infiniband/hw/hns/hns_roce_hw_v2.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries      2343 drivers/infiniband/hw/i40iw/i40iw_verbs.c 			 int num_entries,
num_entries      2359 drivers/infiniband/hw/i40iw/i40iw_verbs.c 	while (cqe_count < num_entries) {
num_entries       612 drivers/infiniband/hw/mlx4/cq.c static void mlx4_ib_qp_sw_comp(struct mlx4_ib_qp *qp, int num_entries,
num_entries       625 drivers/infiniband/hw/mlx4/cq.c 	for (i = 0;  i < cur && *npolled < num_entries; i++) {
num_entries       636 drivers/infiniband/hw/mlx4/cq.c static void mlx4_ib_poll_sw_comp(struct mlx4_ib_cq *cq, int num_entries,
num_entries       646 drivers/infiniband/hw/mlx4/cq.c 		mlx4_ib_qp_sw_comp(qp, num_entries, wc + *npolled, npolled, 1);
num_entries       647 drivers/infiniband/hw/mlx4/cq.c 		if (*npolled >= num_entries)
num_entries       652 drivers/infiniband/hw/mlx4/cq.c 		mlx4_ib_qp_sw_comp(qp, num_entries, wc + *npolled, npolled, 0);
num_entries       653 drivers/infiniband/hw/mlx4/cq.c 		if (*npolled >= num_entries)
num_entries       881 drivers/infiniband/hw/mlx4/cq.c int mlx4_ib_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       891 drivers/infiniband/hw/mlx4/cq.c 		mlx4_ib_poll_sw_comp(cq, num_entries, wc, &npolled);
num_entries       895 drivers/infiniband/hw/mlx4/cq.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries       749 drivers/infiniband/hw/mlx4/mlx4_ib.h int mlx4_ib_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries       386 drivers/infiniband/hw/mlx5/cq.c static void sw_comp(struct mlx5_ib_qp *qp, int num_entries, struct ib_wc *wc,
num_entries       401 drivers/infiniband/hw/mlx5/cq.c 	for (i = 0;  i < cur && np < num_entries; i++) {
num_entries       419 drivers/infiniband/hw/mlx5/cq.c static void mlx5_ib_poll_sw_comp(struct mlx5_ib_cq *cq, int num_entries,
num_entries       427 drivers/infiniband/hw/mlx5/cq.c 		sw_comp(qp, num_entries, wc + *npolled, npolled, true);
num_entries       428 drivers/infiniband/hw/mlx5/cq.c 		if (*npolled >= num_entries)
num_entries       433 drivers/infiniband/hw/mlx5/cq.c 		sw_comp(qp, num_entries, wc + *npolled, npolled, false);
num_entries       434 drivers/infiniband/hw/mlx5/cq.c 		if (*npolled >= num_entries)
num_entries       570 drivers/infiniband/hw/mlx5/cq.c static int poll_soft_wc(struct mlx5_ib_cq *cq, int num_entries,
num_entries       578 drivers/infiniband/hw/mlx5/cq.c 		if (npolled >= num_entries)
num_entries       596 drivers/infiniband/hw/mlx5/cq.c int mlx5_ib_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       610 drivers/infiniband/hw/mlx5/cq.c 			soft_polled = poll_soft_wc(cq, num_entries, wc, true);
num_entries       612 drivers/infiniband/hw/mlx5/cq.c 		mlx5_ib_poll_sw_comp(cq, num_entries - soft_polled,
num_entries       618 drivers/infiniband/hw/mlx5/cq.c 		soft_polled = poll_soft_wc(cq, num_entries, wc, false);
num_entries       620 drivers/infiniband/hw/mlx5/cq.c 	for (npolled = 0; npolled < num_entries - soft_polled; npolled++) {
num_entries      3240 drivers/infiniband/hw/mlx5/main.c 					   int num_entries, int num_groups,
num_entries      3246 drivers/infiniband/hw/mlx5/main.c 						 num_entries,
num_entries      3266 drivers/infiniband/hw/mlx5/main.c 	int num_entries;
num_entries      3306 drivers/infiniband/hw/mlx5/main.c 		num_entries = MLX5_FS_MAX_ENTRIES;
num_entries      3313 drivers/infiniband/hw/mlx5/main.c 					 &num_entries,
num_entries      3327 drivers/infiniband/hw/mlx5/main.c 		num_entries = 1;
num_entries      3334 drivers/infiniband/hw/mlx5/main.c 	max_table_size = min_t(int, num_entries, max_table_size);
num_entries      1143 drivers/infiniband/hw/mlx5/mlx5_ib.h int mlx5_ib_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries      4669 drivers/infiniband/hw/mlx5/qp.c 		sblock_ctrl->num_entries = cpu_to_be16(2);
num_entries       654 drivers/infiniband/hw/mthca/mthca_cq.c int mthca_poll_cq(struct ib_cq *ibcq, int num_entries,
num_entries       669 drivers/infiniband/hw/mthca/mthca_cq.c 	while (npolled < num_entries) {
num_entries       494 drivers/infiniband/hw/mthca/mthca_dev.h int mthca_poll_cq(struct ib_cq *ibcq, int num_entries,
num_entries      1620 drivers/infiniband/hw/ocrdma/ocrdma_hw.c static int ocrdma_build_q_conf(u32 *num_entries, int entry_size,
num_entries      1626 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	*num_entries = roundup_pow_of_two(*num_entries);
num_entries      1627 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	mem_size = *num_entries * entry_size;
num_entries      1640 drivers/infiniband/hw/ocrdma/ocrdma_hw.c 	*num_entries = mem_size / entry_size;
num_entries      2767 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static int ocrdma_poll_hwcq(struct ocrdma_cq *cq, int num_entries,
num_entries      2780 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	while (num_entries) {
num_entries      2811 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			num_entries -= 1;
num_entries      2827 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c static int ocrdma_add_err_cqe(struct ocrdma_cq *cq, int num_entries,
num_entries      2832 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	while (num_entries) {
num_entries      2848 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 		num_entries -= 1;
num_entries      2853 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c int ocrdma_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries      2855 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	int cqes_to_poll = num_entries;
num_entries        51 drivers/infiniband/hw/ocrdma/ocrdma_verbs.h int ocrdma_poll_cq(struct ib_cq *, int num_entries, struct ib_wc *wc);
num_entries       672 drivers/infiniband/hw/qedr/qedr_roce_cm.c int qedr_gsi_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       683 drivers/infiniband/hw/qedr/qedr_roce_cm.c 	while (i < num_entries && qp->rq.cons != qp->rq.gsi_cons) {
num_entries       711 drivers/infiniband/hw/qedr/qedr_roce_cm.c 	while (i < num_entries && qp->sq.cons != qp->sq.gsi_cons) {
num_entries       727 drivers/infiniband/hw/qedr/qedr_roce_cm.c 		 num_entries, i, qp->rq.cons, qp->rq.gsi_cons, qp->sq.cons,
num_entries        48 drivers/infiniband/hw/qedr/qedr_roce_cm.h int qedr_gsi_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries      3698 drivers/infiniband/hw/qedr/verbs.c 		       struct qedr_cq *cq, int num_entries,
num_entries      3704 drivers/infiniband/hw/qedr/verbs.c 	while (num_entries && qp->sq.wqe_cons != hw_cons) {
num_entries      3740 drivers/infiniband/hw/qedr/verbs.c 		num_entries--;
num_entries      3754 drivers/infiniband/hw/qedr/verbs.c 			    int num_entries, struct ib_wc *wc,
num_entries      3761 drivers/infiniband/hw/qedr/verbs.c 		cnt = process_req(dev, qp, cq, num_entries, wc, req->sq_cons,
num_entries      3769 drivers/infiniband/hw/qedr/verbs.c 		cnt = process_req(dev, qp, cq, num_entries, wc, req->sq_cons,
num_entries      3775 drivers/infiniband/hw/qedr/verbs.c 		cnt = process_req(dev, qp, cq, num_entries, wc,
num_entries      3779 drivers/infiniband/hw/qedr/verbs.c 		if (cnt < num_entries) {
num_entries      3979 drivers/infiniband/hw/qedr/verbs.c 			      int num_entries, struct ib_wc *wc, u16 hw_cons)
num_entries      3983 drivers/infiniband/hw/qedr/verbs.c 	while (num_entries && qp->rq.wqe_cons != hw_cons) {
num_entries      3992 drivers/infiniband/hw/qedr/verbs.c 		num_entries--;
num_entries      4013 drivers/infiniband/hw/qedr/verbs.c 				 struct qedr_cq *cq, int num_entries,
num_entries      4026 drivers/infiniband/hw/qedr/verbs.c 			     struct qedr_cq *cq, int num_entries,
num_entries      4033 drivers/infiniband/hw/qedr/verbs.c 		cnt = process_resp_flush(qp, cq, num_entries, wc,
num_entries      4054 drivers/infiniband/hw/qedr/verbs.c int qedr_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries      4072 drivers/infiniband/hw/qedr/verbs.c 		return qedr_gsi_poll_cq(ibcq, num_entries, wc);
num_entries      4077 drivers/infiniband/hw/qedr/verbs.c 	while (num_entries && is_valid_cqe(cq, cqe)) {
num_entries      4094 drivers/infiniband/hw/qedr/verbs.c 			cnt = qedr_poll_cq_req(dev, qp, cq, num_entries, wc,
num_entries      4099 drivers/infiniband/hw/qedr/verbs.c 			cnt = qedr_poll_cq_resp(dev, qp, cq, num_entries, wc,
num_entries      4103 drivers/infiniband/hw/qedr/verbs.c 			cnt = qedr_poll_cq_resp_srq(dev, qp, cq, num_entries,
num_entries      4112 drivers/infiniband/hw/qedr/verbs.c 		num_entries -= cnt;
num_entries        89 drivers/infiniband/hw/qedr/verbs.h int qedr_poll_cq(struct ib_cq *, int num_entries, struct ib_wc *wc);
num_entries       382 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c int pvrdma_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       389 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 	if (num_entries < 1 || wc == NULL)
num_entries       393 drivers/infiniband/hw/vmw_pvrdma/pvrdma_cq.c 	for (npolled = 0; npolled < num_entries; ++npolled) {
num_entries       415 drivers/infiniband/hw/vmw_pvrdma/pvrdma_verbs.h int pvrdma_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc);
num_entries       517 drivers/infiniband/sw/rdmavt/cq.c int rvt_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *entry)
num_entries       535 drivers/infiniband/sw/rdmavt/cq.c 	for (npolled = 0; npolled < num_entries; ++npolled, ++entry) {
num_entries        59 drivers/infiniband/sw/rdmavt/cq.h int rvt_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *entry);
num_entries       209 drivers/infiniband/sw/rdmavt/qp.c 	wss->num_entries = table_bits / BITS_PER_LONG;
num_entries       218 drivers/infiniband/sw/rdmavt/qp.c 	wss->entries = kcalloc_node(wss->num_entries, sizeof(*wss->entries),
num_entries       273 drivers/infiniband/sw/rdmavt/qp.c 			& (wss->num_entries - 1);
num_entries       847 drivers/infiniband/sw/rxe/rxe_verbs.c static int rxe_poll_cq(struct ib_cq *ibcq, int num_entries, struct ib_wc *wc)
num_entries       855 drivers/infiniband/sw/rxe/rxe_verbs.c 	for (i = 0; i < num_entries; i++) {
num_entries        67 drivers/infiniband/sw/siw/siw_verbs.h int siw_poll_cq(struct ib_cq *base_cq, int num_entries, struct ib_wc *wc);
num_entries       166 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	lnum_entries = be16_to_cpu(tbl->num_entries);
num_entries       222 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.c 	lnum_entries = be16_to_cpu(tbl->num_entries);
num_entries       259 drivers/infiniband/ulp/opa_vnic/opa_vnic_encap.h 	__be16                              num_entries;
num_entries       166 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c 	u16 offset, num_entries;
num_entries       171 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c 	num_entries = be16_to_cpu(mac_tbl->num_entries);
num_entries       173 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c 	return ((num_entries <= req_entries) &&
num_entries       174 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c 		(offset + num_entries <= OPA_VNIC_MAC_TBL_MAX_ENTRIES));
num_entries       372 drivers/infiniband/ulp/opa_vnic/opa_vnic_vema.c 		mac_tbl_out->num_entries = mac_tbl_in->num_entries;
num_entries       296 drivers/iommu/io-pgtable-arm-v7s.c static void __arm_v7s_pte_sync(arm_v7s_iopte *ptep, int num_entries,
num_entries       303 drivers/iommu/io-pgtable-arm-v7s.c 				   num_entries * sizeof(*ptep), DMA_TO_DEVICE);
num_entries       306 drivers/iommu/io-pgtable-arm-v7s.c 			      int num_entries, struct io_pgtable_cfg *cfg)
num_entries       310 drivers/iommu/io-pgtable-arm-v7s.c 	for (i = 0; i < num_entries; i++)
num_entries       313 drivers/iommu/io-pgtable-arm-v7s.c 	__arm_v7s_pte_sync(ptep, num_entries, cfg);
num_entries       414 drivers/iommu/io-pgtable-arm-v7s.c 			    int lvl, int num_entries, arm_v7s_iopte *ptep)
num_entries       420 drivers/iommu/io-pgtable-arm-v7s.c 	for (i = 0; i < num_entries; i++)
num_entries       440 drivers/iommu/io-pgtable-arm-v7s.c 	if (num_entries > 1)
num_entries       445 drivers/iommu/io-pgtable-arm-v7s.c 	__arm_v7s_set_pte(ptep, pte, num_entries, cfg);
num_entries       479 drivers/iommu/io-pgtable-arm-v7s.c 	int num_entries = size >> ARM_V7S_LVL_SHIFT(lvl);
num_entries       485 drivers/iommu/io-pgtable-arm-v7s.c 	if (num_entries)
num_entries       487 drivers/iommu/io-pgtable-arm-v7s.c 					lvl, num_entries, ptep);
num_entries       601 drivers/iommu/io-pgtable-arm-v7s.c 	int i, unmap_idx, num_entries, num_ptes;
num_entries       608 drivers/iommu/io-pgtable-arm-v7s.c 	num_entries = size >> ARM_V7S_LVL_SHIFT(2);
num_entries       612 drivers/iommu/io-pgtable-arm-v7s.c 	if (num_entries > 1)
num_entries       615 drivers/iommu/io-pgtable-arm-v7s.c 	for (i = 0; i < num_ptes; i += num_entries, pte += size) {
num_entries       620 drivers/iommu/io-pgtable-arm-v7s.c 		__arm_v7s_set_pte(&tablep[i], pte, num_entries, cfg);
num_entries       645 drivers/iommu/io-pgtable-arm-v7s.c 	int idx, i = 0, num_entries = size >> ARM_V7S_LVL_SHIFT(lvl);
num_entries       657 drivers/iommu/io-pgtable-arm-v7s.c 	} while (++i < num_entries);
num_entries       669 drivers/iommu/io-pgtable-arm-v7s.c 	if (num_entries <= 1 && arm_v7s_pte_is_cont(pte[0], lvl)) {
num_entries       678 drivers/iommu/io-pgtable-arm-v7s.c 	if (num_entries) {
num_entries       681 drivers/iommu/io-pgtable-arm-v7s.c 		__arm_v7s_set_pte(ptep, 0, num_entries, &iop->cfg);
num_entries       683 drivers/iommu/io-pgtable-arm-v7s.c 		for (i = 0; i < num_entries; i++) {
num_entries       473 drivers/iommu/omap-iommu.c 			      unsigned long offset, int num_entries)
num_entries       475 drivers/iommu/omap-iommu.c 	size_t size = num_entries * sizeof(u32);
num_entries       623 drivers/media/i2c/m5mols/m5mols_core.c 	fd->num_entries = 1;
num_entries       638 drivers/media/i2c/m5mols/m5mols_core.c 	fd->num_entries = 1;
num_entries      1154 drivers/media/i2c/s5c73m3/s5c73m3-core.c 	fd->num_entries = 2;
num_entries      1155 drivers/media/i2c/s5c73m3/s5c73m3-core.c 	for (i = 0; i < fd->num_entries; i++)
num_entries      1175 drivers/media/i2c/s5c73m3/s5c73m3-core.c 	fd->num_entries = 2;
num_entries      1178 drivers/media/i2c/s5c73m3/s5c73m3-core.c 	for (i = 0; i < fd->num_entries; i++)
num_entries       883 drivers/media/platform/exynos4-is/fimc-capture.c 	if (num_planes != fd.num_entries)
num_entries       941 drivers/media/platform/qcom/venus/hfi_helper.h 	u32 num_entries;
num_entries      1075 drivers/media/platform/qcom/venus/hfi_helper.h 	u32 num_entries;
num_entries        66 drivers/media/platform/qcom/venus/hfi_parser.c 	u32 num_entries = mode->num_entries;
num_entries        69 drivers/media/platform/qcom/venus/hfi_parser.c 	if (num_entries > MAX_ALLOC_MODE_ENTRIES)
num_entries        74 drivers/media/platform/qcom/venus/hfi_parser.c 	while (num_entries--) {
num_entries       116 drivers/media/platform/vsp1/vsp1_dl.c 	unsigned int num_entries;
num_entries       249 drivers/media/platform/vsp1/vsp1_dl.c 			 unsigned int num_entries, size_t extra_size)
num_entries       267 drivers/media/platform/vsp1/vsp1_dl.c 	dlb_size = num_entries * sizeof(struct vsp1_dl_entry) + extra_size;
num_entries       291 drivers/media/platform/vsp1/vsp1_dl.c 		dlb->max_entries = num_entries;
num_entries       363 drivers/media/platform/vsp1/vsp1_dl.c 	dlb->num_entries = 0;
num_entries       382 drivers/media/platform/vsp1/vsp1_dl.c 	if (WARN_ONCE(dlb->num_entries >= dlb->max_entries,
num_entries       386 drivers/media/platform/vsp1/vsp1_dl.c 	dlb->entries[dlb->num_entries].addr = reg;
num_entries       387 drivers/media/platform/vsp1/vsp1_dl.c 	dlb->entries[dlb->num_entries].data = data;
num_entries       388 drivers/media/platform/vsp1/vsp1_dl.c 	dlb->num_entries++;
num_entries       656 drivers/media/platform/vsp1/vsp1_dl.c 	dl->body0->num_entries = 0;
num_entries       762 drivers/media/platform/vsp1/vsp1_dl.c 	hdr->num_bytes = dl->body0->num_entries
num_entries       770 drivers/media/platform/vsp1/vsp1_dl.c 		hdr->num_bytes = dlb->num_entries
num_entries        70 drivers/media/platform/vsp1/vsp1_dl.h 			 unsigned int num_entries, size_t extra_size);
num_entries       197 drivers/misc/mic/host/mic_smpt.c 	int num_entries;
num_entries       208 drivers/misc/mic/host/mic_smpt.c 	num_entries = mic_get_smpt_ref_count(mdev, dma_addr, size,
num_entries       212 drivers/misc/mic/host/mic_smpt.c 	mic_addr = mic_smpt_op(mdev, smpt_start, num_entries, ref, size);
num_entries        98 drivers/net/dsa/sja1105/sja1105_tas.c 	int num_entries = 0;
num_entries       135 drivers/net/dsa/sja1105/sja1105_tas.c 			num_entries += tas_data->offload[port]->num_entries;
num_entries       148 drivers/net/dsa/sja1105/sja1105_tas.c 	table->entries = kcalloc(num_entries, table->ops->unpacked_entry_size,
num_entries       152 drivers/net/dsa/sja1105/sja1105_tas.c 	table->entry_count = num_entries;
num_entries       198 drivers/net/dsa/sja1105/sja1105_tas.c 		schedule_end_idx = k + offload->num_entries - 1;
num_entries       219 drivers/net/dsa/sja1105/sja1105_tas.c 		for (i = 0; i < offload->num_entries; i++, k++) {
num_entries       298 drivers/net/dsa/sja1105/sja1105_tas.c 	     i < offload->num_entries;
num_entries       304 drivers/net/dsa/sja1105/sja1105_tas.c 		     j < admin->num_entries;
num_entries       374 drivers/net/dsa/sja1105/sja1105_tas.c 	for (i = 0; i < admin->num_entries; i++) {
num_entries       335 drivers/net/ethernet/3com/typhoon.c typhoon_inc_index(u32 *index, const int count, const int num_entries)
num_entries       342 drivers/net/ethernet/3com/typhoon.c 	*index %= num_entries * sizeof(struct cmd_desc);
num_entries       281 drivers/net/ethernet/agere/et131x.c 	u32		 num_entries;
num_entries      1597 drivers/net/ethernet/agere/et131x.c 		for (entry = 0; entry < fbr->num_entries; entry++) {
num_entries      1607 drivers/net/ethernet/agere/et131x.c 		writel(fbr->num_entries - 1, num_des);
num_entries      1614 drivers/net/ethernet/agere/et131x.c 		writel(((fbr->num_entries * LO_MARK_PERCENT_FOR_RX) / 100) - 1,
num_entries      1899 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[0]->num_entries = 512;
num_entries      1901 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[1]->num_entries = 512;
num_entries      1904 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[0]->num_entries = 1024;
num_entries      1906 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[1]->num_entries = 512;
num_entries      1909 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[0]->num_entries = 768;
num_entries      1911 drivers/net/ethernet/agere/et131x.c 		rx_ring->fbr[1]->num_entries = 128;
num_entries      1914 drivers/net/ethernet/agere/et131x.c 	rx_ring->psr_entries = rx_ring->fbr[0]->num_entries +
num_entries      1915 drivers/net/ethernet/agere/et131x.c 			       rx_ring->fbr[1]->num_entries;
num_entries      1920 drivers/net/ethernet/agere/et131x.c 		bufsize = sizeof(struct fbr_desc) * fbr->num_entries;
num_entries      1937 drivers/net/ethernet/agere/et131x.c 		for (i = 0; i < fbr->num_entries / FBR_CHUNKS; i++) {
num_entries      2036 drivers/net/ethernet/agere/et131x.c 		for (ii = 0; ii < fbr->num_entries / FBR_CHUNKS; ii++) {
num_entries      2049 drivers/net/ethernet/agere/et131x.c 		bufsize = sizeof(struct fbr_desc) * fbr->num_entries;
num_entries      2139 drivers/net/ethernet/agere/et131x.c 	if (buff_index < fbr->num_entries) {
num_entries      2161 drivers/net/ethernet/agere/et131x.c 						     fbr->num_entries - 1);
num_entries      2237 drivers/net/ethernet/agere/et131x.c 	if (ring_index > 1 || buff_index > fbr->num_entries - 1) {
num_entries      6534 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	__le32 *num_entries;
num_entries      6612 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	for (i = 0, num_entries = &req.tqm_sp_num_entries,
num_entries      6616 drivers/net/ethernet/broadcom/bnxt/bnxt.c 	     i < 9; i++, num_entries++, pg_attr++, pg_dir++, ena <<= 1) {
num_entries      6622 drivers/net/ethernet/broadcom/bnxt/bnxt.c 		*num_entries = cpu_to_le32(ctx_pg->entries);
num_entries      6437 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h 	__le32	num_entries;
num_entries      6481 drivers/net/ethernet/broadcom/bnxt/bnxt_hsi.h 	__le32	num_entries;
num_entries       205 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	int num_entries = 0;
num_entries       222 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		num_entries++;
num_entries       240 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		if (ntohs(cmd->msg_hdr->num_entries) <=
num_entries       464 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	int num_entries;
num_entries       476 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		num_entries = ntohs(msghdr->num_entries);
num_entries       483 drivers/net/ethernet/brocade/bna/bfa_msgq.c 		BFA_MSGQ_INDX_ADD(rspq->consumer_index, num_entries,
num_entries       626 drivers/net/ethernet/brocade/bna/bfa_msgq.c 	if (ntohs(cmd->msg_hdr->num_entries) <=
num_entries       390 drivers/net/ethernet/brocade/bna/bfi.h 	u16	num_entries;
num_entries       412 drivers/net/ethernet/brocade/bna/bna_enet.c 	admin_up_req->mh.num_entries = htons(
num_entries       429 drivers/net/ethernet/brocade/bna/bna_enet.c 	admin_down_req->mh.num_entries = htons(
num_entries       446 drivers/net/ethernet/brocade/bna/bna_enet.c 	lpbk_up_req->mh.num_entries = htons(
num_entries       467 drivers/net/ethernet/brocade/bna/bna_enet.c 	lpbk_down_req->mh.num_entries = htons(
num_entries      1164 drivers/net/ethernet/brocade/bna/bna_enet.c 	pause_req->mh.num_entries = htons(
num_entries      1626 drivers/net/ethernet/brocade/bna/bna_enet.c 	attr_req->mh.num_entries = htons(
num_entries      1848 drivers/net/ethernet/brocade/bna/bna_enet.c 	stats_req->mh.num_entries = htons(
num_entries       185 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       201 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       217 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       232 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       247 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       264 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       287 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       302 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       319 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       338 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries      1622 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->mh.num_entries = htons(
num_entries      1721 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries      3094 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	cfg_req->mh.num_entries = htons(
num_entries      3141 drivers/net/ethernet/brocade/bna/bna_tx_rx.c 	req->mh.num_entries = htons(
num_entries       102 drivers/net/ethernet/emulex/benet/be_cmds.c 	int num_entries = ARRAY_SIZE(cmd_priv_map);
num_entries       105 drivers/net/ethernet/emulex/benet/be_cmds.c 	for (i = 0; i < num_entries; i++)
num_entries       442 drivers/net/ethernet/freescale/enetc/enetc_pf.c 	int num_entries, vf_entries, i;
num_entries       447 drivers/net/ethernet/freescale/enetc/enetc_pf.c 	num_entries = ENETC_PRFSCAPR_GET_NUM_RFS(val);
num_entries       448 drivers/net/ethernet/freescale/enetc/enetc_pf.c 	vf_entries = num_entries / (pf->total_vfs + 1);
num_entries       453 drivers/net/ethernet/freescale/enetc/enetc_pf.c 		      num_entries - vf_entries * pf->total_vfs);
num_entries       268 drivers/net/ethernet/freescale/ucc_geth.c 				  u8 num_entries,
num_entries       278 drivers/net/ethernet/freescale/ucc_geth.c 	for (i = 0; i < num_entries; i++) {
num_entries       307 drivers/net/ethernet/freescale/ucc_geth.c 				    u8 num_entries,
num_entries       315 drivers/net/ethernet/freescale/ucc_geth.c 	for (i = 0; i < num_entries; i++) {
num_entries       341 drivers/net/ethernet/freescale/ucc_geth.c 				  u8 num_entries,
num_entries       350 drivers/net/ethernet/freescale/ucc_geth.c 	for (i = 0; i < num_entries; i++) {
num_entries        49 drivers/net/ethernet/google/gve/gve.h 	u32 num_entries;
num_entries       334 drivers/net/ethernet/google/gve/gve_adminq.c 	u32 num_entries = qpl->num_entries;
num_entries       335 drivers/net/ethernet/google/gve/gve_adminq.c 	u32 size = num_entries * sizeof(qpl->page_buses[0]);
num_entries       347 drivers/net/ethernet/google/gve/gve_adminq.c 	for (i = 0; i < num_entries; i++)
num_entries       353 drivers/net/ethernet/google/gve/gve_adminq.c 		.num_pages = cpu_to_be32(num_entries),
num_entries       547 drivers/net/ethernet/google/gve/gve_main.c 	qpl->num_entries = 0;
num_entries       565 drivers/net/ethernet/google/gve/gve_main.c 		qpl->num_entries++;
num_entries       592 drivers/net/ethernet/google/gve/gve_main.c 	for (i = 0; i < qpl->num_entries; i++)
num_entries       599 drivers/net/ethernet/google/gve/gve_main.c 	priv->num_registered_pages -= qpl->num_entries;
num_entries        31 drivers/net/ethernet/google/gve/gve_tx.c 	fifo->base = vmap(fifo->qpl->pages, fifo->qpl->num_entries, VM_MAP,
num_entries        39 drivers/net/ethernet/google/gve/gve_tx.c 	fifo->size = fifo->qpl->num_entries * PAGE_SIZE;
num_entries       405 drivers/net/ethernet/ibm/ehea/ehea.h 	int num_entries;
num_entries       418 drivers/net/ethernet/ibm/ehea/ehea.h 	int num_entries;
num_entries       223 drivers/net/ethernet/ibm/ehea/ehea_main.c 	ehea_fw_handles.num_entries = i;
num_entries       312 drivers/net/ethernet/ibm/ehea/ehea_main.c 	ehea_bcmc_regs.num_entries = i;
num_entries      3289 drivers/net/ethernet/ibm/ehea/ehea_main.c 		for (i = 0; i < ehea_fw_handles.num_entries; i++)
num_entries      3295 drivers/net/ethernet/ibm/ehea/ehea_main.c 		for (i = 0; i < ehea_bcmc_regs.num_entries; i++)
num_entries       642 drivers/net/ethernet/ibm/ibmvnic.c 			    int num_entries, int buf_size)
num_entries       647 drivers/net/ethernet/ibm/ibmvnic.c 	tx_pool->tx_buff = kcalloc(num_entries,
num_entries       654 drivers/net/ethernet/ibm/ibmvnic.c 				 num_entries * buf_size))
num_entries       657 drivers/net/ethernet/ibm/ibmvnic.c 	tx_pool->free_map = kcalloc(num_entries, sizeof(int), GFP_KERNEL);
num_entries       661 drivers/net/ethernet/ibm/ibmvnic.c 	for (i = 0; i < num_entries; i++)
num_entries       666 drivers/net/ethernet/ibm/ibmvnic.c 	tx_pool->num_buffers = num_entries;
num_entries      1378 drivers/net/ethernet/ibm/ibmvnic.c 				int *num_entries, u8 hdr_field)
num_entries      1386 drivers/net/ethernet/ibm/ibmvnic.c 	*num_entries += create_hdr_descs(hdr_field, hdr_data, tot_len, hdr_len,
num_entries      1424 drivers/net/ethernet/ibm/ibmvnic.c 	int num_entries = 1;
num_entries      1551 drivers/net/ethernet/ibm/ibmvnic.c 		build_hdr_descs_arr(tx_buff, &num_entries, *hdrs);
num_entries      1552 drivers/net/ethernet/ibm/ibmvnic.c 		tx_crq.v1.n_crq_elem = num_entries;
num_entries      1553 drivers/net/ethernet/ibm/ibmvnic.c 		tx_buff->num_entries = num_entries;
num_entries      1570 drivers/net/ethernet/ibm/ibmvnic.c 					       (u64)num_entries);
num_entries      1574 drivers/net/ethernet/ibm/ibmvnic.c 		tx_buff->num_entries = num_entries;
num_entries      1600 drivers/net/ethernet/ibm/ibmvnic.c 	if (atomic_add_return(num_entries, &tx_scrq->used)
num_entries      2914 drivers/net/ethernet/ibm/ibmvnic.c 		int num_entries = 0;
num_entries      2945 drivers/net/ethernet/ibm/ibmvnic.c 			num_entries += txbuff->num_entries;
num_entries      2955 drivers/net/ethernet/ibm/ibmvnic.c 		if (atomic_sub_return(num_entries, &scrq->used) <=
num_entries      3387 drivers/net/ethernet/ibm/ibmvnic.c 				u64 remote_handle, u64 ioba, u64 num_entries)
num_entries      3397 drivers/net/ethernet/ibm/ibmvnic.c 				ioba, num_entries);
num_entries       897 drivers/net/ethernet/ibm/ibmvnic.h 	int num_entries;
num_entries      1900 drivers/net/ethernet/intel/e1000e/ich8lan.c 	u32 num_entries;
num_entries      1908 drivers/net/ethernet/intel/e1000e/ich8lan.c 		num_entries = hw->mac.rar_entry_count;
num_entries      1912 drivers/net/ethernet/intel/e1000e/ich8lan.c 		num_entries = 1;
num_entries      1916 drivers/net/ethernet/intel/e1000e/ich8lan.c 		num_entries = wlock_mac + 1;
num_entries      1920 drivers/net/ethernet/intel/e1000e/ich8lan.c 	return num_entries;
num_entries       178 drivers/net/ethernet/intel/i40e/i40e.h 	u16 num_entries;
num_entries       714 drivers/net/ethernet/intel/i40e/i40e_adminq_cmd.h 	u8	num_entries;         /* reserved for command */
num_entries      1564 drivers/net/ethernet/intel/i40e/i40e_adminq_cmd.h 	__le16 num_entries;
num_entries      2836 drivers/net/ethernet/intel/i40e/i40e_common.c 	cmd->num_entries = cpu_to_le16(count);
num_entries       207 drivers/net/ethernet/intel/i40e/i40e_main.c 	while (i < pile->num_entries) {
num_entries       215 drivers/net/ethernet/intel/i40e/i40e_main.c 		for (j = 0; (j < needed) && ((i+j) < pile->num_entries); j++) {
num_entries       250 drivers/net/ethernet/intel/i40e/i40e_main.c 	if (!pile || index >= pile->num_entries)
num_entries       254 drivers/net/ethernet/intel/i40e/i40e_main.c 	     i < pile->num_entries && pile->list[i] == valid_id;
num_entries      11267 drivers/net/ethernet/intel/i40e/i40e_main.c 	pf->irq_pile->num_entries = vectors;
num_entries      12010 drivers/net/ethernet/intel/i40e/i40e_main.c 	pf->qp_pile->num_entries = pf->hw.func_caps.num_tx_qp;
num_entries       144 drivers/net/ethernet/intel/ice/ice.h 	u16 num_entries;
num_entries       240 drivers/net/ethernet/intel/ice/ice_adminq_cmd.h 	__le16 num_entries; /* Number of Resource entries */
num_entries      1418 drivers/net/ethernet/intel/ice/ice_adminq_cmd.h 	u8 num_entries;
num_entries      2865 drivers/net/ethernet/intel/ice/ice_common.c 	cmd->num_entries = num_qgrps;
num_entries       254 drivers/net/ethernet/intel/ice/ice_controlq.c ice_cfg_cq_regs(struct ice_hw *hw, struct ice_ctl_q_ring *ring, u16 num_entries)
num_entries       261 drivers/net/ethernet/intel/ice/ice_controlq.c 	wr32(hw, ring->len, (num_entries | ring->len_ena_mask));
num_entries      2871 drivers/net/ethernet/intel/ice/ice_lib.c 	if (!needed || needed > res->num_entries || id >= ICE_RES_VALID_BIT) {
num_entries      2874 drivers/net/ethernet/intel/ice/ice_lib.c 			needed, res->num_entries, id);
num_entries      2506 drivers/net/ethernet/intel/ice/ice_main.c 	pf->irq_tracker->num_entries = vectors;
num_entries      2507 drivers/net/ethernet/intel/ice/ice_main.c 	pf->irq_tracker->end = pf->irq_tracker->num_entries;
num_entries        64 drivers/net/ethernet/intel/ice/ice_switch.c ice_aq_alloc_free_res(struct ice_hw *hw, u16 num_entries,
num_entries        76 drivers/net/ethernet/intel/ice/ice_switch.c 	if (buf_size < (num_entries * sizeof(buf->elem[0])))
num_entries        83 drivers/net/ethernet/intel/ice/ice_switch.c 	cmd->num_entries = cpu_to_le16(num_entries);
num_entries       246 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	if (pf->sriov_base_vector < res->num_entries) {
num_entries       247 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 		res->end = res->num_entries;
num_entries       249 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 			res->num_entries - pf->sriov_base_vector;
num_entries       787 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	for (i = res->num_entries - 1; i >= 0; i--)
num_entries       834 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 	if (num_msix_needed > (pf_total_msix_vectors - res->num_entries)) {
num_entries       836 drivers/net/ethernet/intel/ice/ice_virtchnl_pf.c 			res->num_entries - pf->sriov_base_vector;
num_entries       913 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	int entry, num_entries, max_entries;
num_entries       924 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	num_entries = min_t(int, profile->cam_entries, max_entries);
num_entries       925 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	for (entry = 0; entry < num_entries; entry++)
num_entries       930 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	num_entries = min_t(int, profile->action_entries, max_entries);
num_entries       931 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	for (entry = 0; entry < num_entries; entry++)
num_entries       936 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	num_entries = min_t(int, profile->action_entries, profile->cam_entries);
num_entries       938 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 		    NPC_AF_KPUX_ENTRY_DISX(kpu, 0), enable_mask(num_entries));
num_entries       939 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 	if (num_entries > 64) {
num_entries       942 drivers/net/ethernet/marvell/octeontx2/af/rvu_npc.c 			    enable_mask(num_entries - 64));
num_entries      3108 drivers/net/ethernet/mellanox/mlx4/main.c 	int num_entries = dev->caps.num_ports;
num_entries      3111 drivers/net/ethernet/mellanox/mlx4/main.c 	priv->steer = kcalloc(num_entries, sizeof(struct mlx4_steer),
num_entries      3116 drivers/net/ethernet/mellanox/mlx4/main.c 	for (i = 0; i < num_entries; i++)
num_entries      3129 drivers/net/ethernet/mellanox/mlx4/main.c 	int num_entries = dev->caps.num_ports;
num_entries      3132 drivers/net/ethernet/mellanox/mlx4/main.c 	for (i = 0; i < num_entries; i++) {
num_entries       433 drivers/net/ethernet/mellanox/mlx4/mr.c 			  int num_entries)
num_entries       435 drivers/net/ethernet/mellanox/mlx4/mr.c 	return mlx4_cmd(dev, mailbox->dma, num_entries, 0, MLX4_CMD_WRITE_MTT,
num_entries        31 drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.h 					     f->rule->action.num_entries : 0)
num_entries        35 drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.h 				f->rule->action.num_entries : 0);
num_entries        39 drivers/net/ethernet/mellanox/mlx5/core/diag/en_tc_tracepoint.h 						 f->rule->action.num_entries);
num_entries        65 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	unsigned int num_entries;
num_entries       301 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	WARN_ON(mc_record->num_entries);
num_entries       313 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 		unsigned int num_entries = mc_record->num_entries;
num_entries       317 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 		    num_entries < nve->num_max_mc_entries[proto])
num_entries       327 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	if (mc_record->num_entries != 0)
num_entries       358 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	unsigned int num_entries = 0;
num_entries       373 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 			     next_kvdl_index, mc_record->num_entries);
num_entries       383 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 					  num_entries++);
num_entries       386 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	WARN_ON(num_entries != mc_record->num_entries);
num_entries       439 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	mc_record->num_entries++;
num_entries       449 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	if (mc_record->num_entries != 1 ||
num_entries       462 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	mc_record->num_entries--;
num_entries       474 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	mc_record->num_entries--;
num_entries       479 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	if (mc_record->num_entries != 0) {
num_entries       617 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	if (mc_record->num_entries != 1)
num_entries       688 drivers/net/ethernet/mellanox/mlxsw/spectrum_nve.c 	WARN_ON(mc_record->num_entries);
num_entries      2166 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	u8 num_entries;
num_entries      2169 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	num_entries = mlxsw_reg_rauhtd_ipv4_rec_num_entries_get(rauhtd_pl,
num_entries      2172 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	num_entries++;
num_entries      2175 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	for (i = 0; i < num_entries; i++) {
num_entries      2211 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	u8 num_rec, last_rec_index, num_entries;
num_entries      2222 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	num_entries = mlxsw_reg_rauhtd_ipv4_rec_num_entries_get(rauhtd_pl,
num_entries      2224 drivers/net/ethernet/mellanox/mlxsw/spectrum_router.c 	if (++num_entries == MLXSW_REG_RAUHTD_IPV4_ENT_PER_REC)
num_entries        22 drivers/net/ethernet/mscc/ocelot_flower.c 	if (f->rule->action.num_entries != 1)
num_entries      3726 drivers/net/ethernet/neterion/s2io.c 	size = nic->num_entries * sizeof(struct msix_entry);
num_entries      3736 drivers/net/ethernet/neterion/s2io.c 	size = nic->num_entries * sizeof(struct s2io_msix_entry);
num_entries      3744 drivers/net/ethernet/neterion/s2io.c 			+= (nic->num_entries * sizeof(struct msix_entry));
num_entries      3755 drivers/net/ethernet/neterion/s2io.c 	for (i = 1; i < nic->num_entries; i++) {
num_entries      3774 drivers/net/ethernet/neterion/s2io.c 				    nic->num_entries, nic->num_entries);
num_entries      3779 drivers/net/ethernet/neterion/s2io.c 		swstats->mem_freed += nic->num_entries *
num_entries      3782 drivers/net/ethernet/neterion/s2io.c 		swstats->mem_freed += nic->num_entries *
num_entries      3859 drivers/net/ethernet/neterion/s2io.c 	for (i = 0; i < sp->num_entries; i++) {
num_entries      3934 drivers/net/ethernet/neterion/s2io.c 			swstats->mem_freed += sp->num_entries *
num_entries      3939 drivers/net/ethernet/neterion/s2io.c 			swstats->mem_freed += sp->num_entries *
num_entries      6930 drivers/net/ethernet/neterion/s2io.c 		for (i = 0; i < sp->num_entries; i++) {
num_entries      7896 drivers/net/ethernet/neterion/s2io.c 		sp->num_entries = config->rx_ring_num + 1;
num_entries      7979 drivers/net/ethernet/neterion/s2io.c 		sp->num_entries = config->rx_ring_num + 1;
num_entries       945 drivers/net/ethernet/neterion/s2io.h 	int num_entries;
num_entries       267 drivers/net/ethernet/netronome/nfp/flower/action.c 	int num_act = flow->rule->action.num_entries;
num_entries      1160 drivers/net/ethernet/netronome/nfp/flower/action.c 	if (current_act_idx == flow_act->num_entries)
num_entries        81 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	unsigned int i, num_entries, entry_sz;
num_entries        92 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	num_entries = n;
num_entries        95 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 				   num_entries * sizeof(pf->shared_bufs[0]),
num_entries       100 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	entry_sz = nfp_cpp_area_size(sb_desc_area) / num_entries;
num_entries       102 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	pf->shared_bufs = kmalloc_array(num_entries, sizeof(pf->shared_bufs[0]),
num_entries       109 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	for (i = 0; i < num_entries; i++) {
num_entries       125 drivers/net/ethernet/netronome/nfp/nfp_shared_buf.c 	pf->num_shared_bufs = num_entries;
num_entries       426 drivers/net/ethernet/qlogic/netxen/netxen_nic.h 	uint32_t	num_entries;
num_entries       571 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	__le32 entries = cpu_to_le32(directory->num_entries);
num_entries       601 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	entries = cpu_to_le32(directory->num_entries);
num_entries       697 drivers/net/ethernet/qlogic/netxen/netxen_nic_init.c 	entries = cpu_to_le32(ptab_descr->num_entries);
num_entries       362 drivers/net/ethernet/qlogic/qed/qed_dcbx.c 	int num_entries;
num_entries       375 drivers/net/ethernet/qlogic/qed/qed_dcbx.c 	num_entries = QED_MFW_GET_FIELD(p_app->flags, DCBX_APP_NUM_ENTRIES);
num_entries       378 drivers/net/ethernet/qlogic/qed/qed_dcbx.c 				  num_entries, dcbx_version);
num_entries       269 drivers/net/ethernet/qlogic/qed/qed_debug.c 	u32 num_entries[MAX_CHIP_IDS];
num_entries      3168 drivers/net/ethernet/qlogic/qed/qed_debug.c 		u32 num_entries;
num_entries      3173 drivers/net/ethernet/qlogic/qed/qed_debug.c 		num_entries = cond_hdr->data_size / MEM_DUMP_ENTRY_SIZE_DWORDS;
num_entries      3191 drivers/net/ethernet/qlogic/qed/qed_debug.c 		for (i = 0; i < num_entries;
num_entries      3635 drivers/net/ethernet/qlogic/qed/qed_debug.c 		u32 rss_addr, num_entries, total_dwords;
num_entries      3642 drivers/net/ethernet/qlogic/qed/qed_debug.c 		num_entries = rss_defs->num_entries[dev_data->chip_id];
num_entries      3643 drivers/net/ethernet/qlogic/qed/qed_debug.c 		total_dwords = (num_entries * rss_defs->entry_width) / 32;
num_entries      4221 drivers/net/ethernet/qlogic/qed/qed_debug.c 			  reg->num_entries > 1 || reg->start_entry > 0 ? 1 : 0);
num_entries      4346 drivers/net/ethernet/qlogic/qed/qed_debug.c 			if (cond_regs[reg_id].num_entries > num_reg_entries)
num_entries      4347 drivers/net/ethernet/qlogic/qed/qed_debug.c 				num_reg_entries = cond_regs[reg_id].num_entries;
num_entries      4391 drivers/net/ethernet/qlogic/qed/qed_debug.c 				if (reg->num_entries > 1 ||
num_entries      2118 drivers/net/ethernet/qlogic/qed/qed_hsi.h 	u16 num_entries;
num_entries       227 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	__le32	num_entries;
num_entries       412 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	u32	num_entries;
num_entries       429 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	u32	num_entries;
num_entries       460 drivers/net/ethernet/qlogic/qlcnic/qlcnic.h 	u32	num_entries;
num_entries       752 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 	entries = le32_to_cpu(directory->num_entries);
num_entries       781 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 	entries = le32_to_cpu(directory->num_entries);
num_entries       874 drivers/net/ethernet/qlogic/qlcnic/qlcnic_init.c 	entries = le32_to_cpu(ptab_descr->num_entries);
num_entries       293 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	fw_dump->num_entries = hdr->num_entries;
num_entries       347 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	fw_dump->num_entries = hdr->num_entries;
num_entries      1335 drivers/net/ethernet/qlogic/qlcnic/qlcnic_minidump.c 	no_entries = fw_dump->num_entries;
num_entries      6342 drivers/net/ethernet/sfc/ef10.c 	size_t num_entries = 0;
num_entries      6373 drivers/net/ethernet/sfc/ef10.c 				num_entries++) = entry;
num_entries      6385 drivers/net/ethernet/sfc/ef10.c 			     EFX_WORD_1, num_entries);
num_entries      6389 drivers/net/ethernet/sfc/ef10.c 	inlen = MC_CMD_SET_TUNNEL_ENCAP_UDP_PORTS_IN_LEN(num_entries);
num_entries      1351 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c 	rule->action.num_entries = 1;
num_entries      1479 drivers/net/ethernet/stmicro/stmmac/stmmac_selftests.c 	rule->action.num_entries = 1;
num_entries      3011 drivers/net/ethernet/sun/niu.c static u64 hash_addr_regval(unsigned long index, unsigned long num_entries)
num_entries      3013 drivers/net/ethernet/sun/niu.c 	return (u64)index | (num_entries == 1 ? HASH_TBL_ADDR_AUTOINC : 0);
num_entries      3018 drivers/net/ethernet/sun/niu.c 		     unsigned long index, unsigned long num_entries,
num_entries      3021 drivers/net/ethernet/sun/niu.c 	u64 val = hash_addr_regval(index, num_entries);
num_entries      3025 drivers/net/ethernet/sun/niu.c 	    index + num_entries > FCRAM_SIZE)
num_entries      3029 drivers/net/ethernet/sun/niu.c 	for (i = 0; i < num_entries; i++)
num_entries      3037 drivers/net/ethernet/sun/niu.c 		      unsigned long index, unsigned long num_entries,
num_entries      3040 drivers/net/ethernet/sun/niu.c 	u64 val = hash_addr_regval(index, num_entries);
num_entries      3044 drivers/net/ethernet/sun/niu.c 	    index + (num_entries * 8) > FCRAM_SIZE)
num_entries      3048 drivers/net/ethernet/sun/niu.c 	for (i = 0; i < num_entries; i++)
num_entries      1715 drivers/net/ethernet/sun/sunvnet_common.c 			   (dr->entry_size * dr->num_entries),
num_entries      1719 drivers/net/ethernet/sun/sunvnet_common.c 	dr->num_entries = 0;
num_entries      1763 drivers/net/ethernet/sun/sunvnet_common.c 	dr->num_entries = VNET_TX_RING_SIZE;
num_entries      1128 drivers/net/wireless/ath/ath6kl/debug.c 	u16 num_entries;
num_entries      1134 drivers/net/wireless/ath/ath6kl/debug.c 	num_entries = le16_to_cpu(tbl->num_entries);
num_entries      1135 drivers/net/wireless/ath/ath6kl/debug.c 	if (struct_size(tbl, info, num_entries) > len)
num_entries      1164 drivers/net/wireless/ath/ath6kl/debug.c 	u16 num_entries, i;
num_entries      1191 drivers/net/wireless/ath/ath6kl/debug.c 	num_entries = le16_to_cpu(tbl->num_entries);
num_entries      1193 drivers/net/wireless/ath/ath6kl/debug.c 	buf_len = 100 + num_entries * 100;
num_entries      1203 drivers/net/wireless/ath/ath6kl/debug.c 	for (i = 0; i < num_entries; i++) {
num_entries       656 drivers/net/wireless/ath/ath6kl/htc_pipe.c 				      int num_entries,
num_entries       665 drivers/net/wireless/ath/ath6kl/htc_pipe.c 	for (i = 0; i < num_entries; i++, rpt++) {
num_entries      1921 drivers/net/wireless/ath/ath6kl/wmi.h 	__le16 num_entries;
num_entries      3391 drivers/net/wireless/marvell/mwifiex/cfg80211.c 	int ret = 0, num_entries = 1;
num_entries      3396 drivers/net/wireless/marvell/mwifiex/cfg80211.c 		num_entries++;
num_entries      3398 drivers/net/wireless/marvell/mwifiex/cfg80211.c 	mef_entry = kcalloc(num_entries, sizeof(*mef_entry), GFP_KERNEL);
num_entries      3405 drivers/net/wireless/marvell/mwifiex/cfg80211.c 	mef_cfg.num_entries = num_entries;
num_entries      2108 drivers/net/wireless/marvell/mwifiex/fw.h 	__le16 num_entries;
num_entries       423 drivers/net/wireless/marvell/mwifiex/ioctl.h 	u16 num_entries;
num_entries      1445 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 	mef_cfg->num_entries = cpu_to_le16(mef->num_entries);
num_entries      1448 drivers/net/wireless/marvell/mwifiex/sta_cmd.c 	for (i = 0; i < mef->num_entries; i++) {
num_entries       693 drivers/net/wireless/quantenna/qtnfmac/qlink.h 	__le32 num_entries;
num_entries       181 drivers/net/wireless/quantenna/qtnfmac/qlink_util.c 	qacl->num_entries = cpu_to_le32(acl->n_acl_entries);
num_entries       222 drivers/parisc/iosapic.c static struct irt_entry *iosapic_alloc_irt(int num_entries)
num_entries       231 drivers/parisc/iosapic.c 	a = (unsigned long)kmalloc(sizeof(struct irt_entry) * num_entries + 8, GFP_KERNEL);
num_entries       268 drivers/parisc/iosapic.c 	unsigned long num_entries = 0UL;
num_entries       275 drivers/parisc/iosapic.c 		status = pdc_pat_get_irt_size(&num_entries, cell_num);
num_entries       279 drivers/parisc/iosapic.c 		BUG_ON(num_entries == 0);
num_entries       287 drivers/parisc/iosapic.c 		table = iosapic_alloc_irt(num_entries);
num_entries       308 drivers/parisc/iosapic.c 		status = pdc_pci_irt_size(&num_entries, 0);
num_entries       316 drivers/parisc/iosapic.c 		BUG_ON(num_entries == 0);
num_entries       318 drivers/parisc/iosapic.c 		table = iosapic_alloc_irt(num_entries);
num_entries       326 drivers/parisc/iosapic.c 		status = pdc_pci_irt(num_entries, 0, table);
num_entries       341 drivers/parisc/iosapic.c 		num_entries,
num_entries       344 drivers/parisc/iosapic.c 	for (i = 0 ; i < num_entries ; i++, p++) {
num_entries       356 drivers/parisc/iosapic.c 	return num_entries;
num_entries       212 drivers/pci/hotplug/ibmphp.h 	u16 num_entries;
num_entries       234 drivers/pci/hotplug/ibmphp_ebda.c 	u16 ebda_seg, num_entries, next_offset, offset, blk_id, sub_addr, re, rc_id, re_id, base;
num_entries       332 drivers/pci/hotplug/ibmphp_ebda.c 			num_entries = readw(io_mem + sub_addr);
num_entries       341 drivers/pci/hotplug/ibmphp_ebda.c 			rsrc_list_ptr->num_entries = num_entries;
num_entries       346 drivers/pci/hotplug/ibmphp_ebda.c 			debug("num of rsrc: %x\n", num_entries);
num_entries       933 drivers/pci/hotplug/ibmphp_ebda.c 	for (rsrc = 0; rsrc < rsrc_list_ptr->num_entries; rsrc++) {
num_entries       504 drivers/regulator/ti-abb-regulator.c 	int num_entries, min_uV = INT_MAX, max_uV = 0;
num_entries       512 drivers/regulator/ti-abb-regulator.c 	num_entries = of_property_count_u32_elems(dev->of_node, pname);
num_entries       513 drivers/regulator/ti-abb-regulator.c 	if (num_entries < 0) {
num_entries       515 drivers/regulator/ti-abb-regulator.c 		return num_entries;
num_entries       518 drivers/regulator/ti-abb-regulator.c 	if (!num_entries || (num_entries % num_values)) {
num_entries       523 drivers/regulator/ti-abb-regulator.c 	num_entries /= num_values;
num_entries       525 drivers/regulator/ti-abb-regulator.c 	info = devm_kcalloc(dev, num_entries, sizeof(*info), GFP_KERNEL);
num_entries       531 drivers/regulator/ti-abb-regulator.c 	volt_table = devm_kcalloc(dev, num_entries, sizeof(unsigned int),
num_entries       536 drivers/regulator/ti-abb-regulator.c 	abb->rdesc.n_voltages = num_entries;
num_entries       541 drivers/regulator/ti-abb-regulator.c 	for (i = 0; i < num_entries; i++, info++, volt_table++) {
num_entries       168 drivers/reset/core.c 				 unsigned int num_entries)
num_entries       174 drivers/reset/core.c 	for (i = 0; i < num_entries; i++) {
num_entries       189 drivers/rpmsg/qcom_glink_rpm.c 	int num_entries;
num_entries       210 drivers/rpmsg/qcom_glink_rpm.c 	num_entries = le32_to_cpu(toc->count);
num_entries       211 drivers/rpmsg/qcom_glink_rpm.c 	if (num_entries > RPM_TOC_MAX_ENTRIES) {
num_entries       216 drivers/rpmsg/qcom_glink_rpm.c 	for (i = 0; i < num_entries; i++) {
num_entries       708 drivers/s390/net/qeth_core_mpc.h 	__u8 num_entries;
num_entries       716 drivers/s390/net/qeth_core_mpc.h 	__u8 num_entries;
num_entries       754 drivers/s390/net/qeth_core_mpc.h 	__u16 num_entries;
num_entries      1206 drivers/s390/net/qeth_l2_main.c 	extrasize = sizeof(struct qeth_sbp_port_entry) * qports->num_entries;
num_entries      1247 drivers/s390/net/qeth_l2_main.c 		for (i = 0; i < data->hostevs.num_entries; i++) {
num_entries      1278 drivers/s390/net/qeth_l2_main.c 						hostevs->num_entries;
num_entries      1488 drivers/s390/net/qeth_l2_main.c 	if (qports->num_entries > 0) {
num_entries      1463 drivers/scsi/aic7xxx/aic79xx.h 					   u_int num_entries,
num_entries      9629 drivers/scsi/aic7xxx/aic79xx_core.c ahd_print_register(const ahd_reg_parse_entry_t *table, u_int num_entries,
num_entries      9650 drivers/scsi/aic7xxx/aic79xx_core.c 		for (entry = 0; entry < num_entries; entry++) {
num_entries      9664 drivers/scsi/aic7xxx/aic79xx_core.c 		if (entry >= num_entries)
num_entries      1269 drivers/scsi/aic7xxx/aic7xxx.h 					   u_int num_entries,
num_entries      7087 drivers/scsi/aic7xxx/aic7xxx_core.c ahc_print_register(const ahc_reg_parse_entry_t *table, u_int num_entries,
num_entries      7108 drivers/scsi/aic7xxx/aic7xxx_core.c 		for (entry = 0; entry < num_entries; entry++) {
num_entries      7122 drivers/scsi/aic7xxx/aic7xxx_core.c 		if (entry >= num_entries)
num_entries       383 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 		       symbol_node_t *regnode, u_int num_entries)
num_entries       396 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 		if (num_entries != 0)
num_entries       413 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 			num_entries != 0 ? regnode->symbol->name : "NULL",
num_entries       414 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 			num_entries != 0 ? "_parse_table" : "",
num_entries       415 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 			num_entries,
num_entries       548 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 			int		 num_entries;
num_entries       550 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 			num_entries = 0;
num_entries       556 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 				if (num_entries == 0)
num_entries       561 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 				num_entries++;
num_entries       565 drivers/scsi/aic7xxx/aicasm/aicasm_symbol.c 					       curnode, num_entries);
num_entries      1742 drivers/scsi/be2iscsi/be_main.c 		if (nbuf == pasync_ctx->num_entries) {
num_entries      1747 drivers/scsi/be2iscsi/be_main.c 		if (++pi == pasync_ctx->num_entries)
num_entries      2730 drivers/scsi/be2iscsi/be_main.c 			pasync_ctx->num_entries = BEISCSI_ASYNC_HDQ_SIZE(phba,
num_entries      3732 drivers/scsi/be2iscsi/be_main.c 			nbufs = phwi_context->pasync_ctx[ulp_num]->num_entries;
num_entries       593 drivers/scsi/be2iscsi/be_main.h 	u16 num_entries;
num_entries       945 drivers/scsi/bfa/bfa_fc.h 	u32        num_entries:16;
num_entries       793 drivers/scsi/bfa/bfa_fcbuild.c 	rpsc_acc->num_entries = cpu_to_be16(1);
num_entries      3175 drivers/scsi/bfa/bfa_fcs_lport.c 	u32		num_entries;
num_entries      3196 drivers/scsi/bfa/bfa_fcs_lport.c 		num_entries = be32_to_cpu(gmal_resp->ms_len);
num_entries      3197 drivers/scsi/bfa/bfa_fcs_lport.c 		if (num_entries == 0) {
num_entries      3209 drivers/scsi/bfa/bfa_fcs_lport.c 		while (num_entries > 0) {
num_entries      3229 drivers/scsi/bfa/bfa_fcs_lport.c 				--num_entries;
num_entries      5618 drivers/scsi/bfa/bfa_fcs_lport.c 	int             num_entries;
num_entries      5623 drivers/scsi/bfa/bfa_fcs_lport.c 	num_entries =
num_entries      5627 drivers/scsi/bfa/bfa_fcs_lport.c 	bfa_trc(port->fcs, num_entries);
num_entries      5633 drivers/scsi/bfa/bfa_fcs_lport.c 	for (i = 0; i < num_entries; i++) {
num_entries       563 drivers/scsi/bfa/bfi.h 	u16	num_entries;
num_entries      1454 drivers/scsi/cxlflash/main.c 	int num_entries = 0;
num_entries      1458 drivers/scsi/cxlflash/main.c 	num_entries = process_hrrq(hwq, &doneq, budget);
num_entries      1459 drivers/scsi/cxlflash/main.c 	if (num_entries < budget)
num_entries      1465 drivers/scsi/cxlflash/main.c 	return num_entries;
num_entries      1481 drivers/scsi/cxlflash/main.c 	int num_entries = 0;
num_entries      1497 drivers/scsi/cxlflash/main.c 	num_entries = process_hrrq(hwq, &doneq, -1);
num_entries      1500 drivers/scsi/cxlflash/main.c 	if (num_entries == 0)
num_entries       120 drivers/scsi/dpt/dpti_i2o.h 	u8	num_entries;
num_entries      1073 drivers/scsi/dpt_i2o.c 			pHba->hrt->num_entries * pHba->hrt->entry_len << 2,
num_entries      3097 drivers/scsi/dpt_i2o.c 	sys_tbl->num_entries = hba_count;
num_entries      3105 drivers/scsi/dpt_i2o.c 			sys_tbl->num_entries--;
num_entries      3276 drivers/scsi/dpt_i2o.c 		if (pHba->hrt->num_entries * pHba->hrt->entry_len << 2 > size) {
num_entries      3277 drivers/scsi/dpt_i2o.c 			int newsize = pHba->hrt->num_entries * pHba->hrt->entry_len << 2;
num_entries      1829 drivers/scsi/ipr.c 	int i, num_entries;
num_entries      1848 drivers/scsi/ipr.c 	num_entries = min_t(u32, be32_to_cpu(error->num_entries),
num_entries      1851 drivers/scsi/ipr.c 	for (i = 0; i < num_entries; i++, array_entry++) {
num_entries      2293 drivers/scsi/ipr.c 	for (i = 0, fabric = error->desc; i < error->num_entries; i++) {
num_entries      2317 drivers/scsi/ipr.c 	int i, num_entries;
num_entries      2335 drivers/scsi/ipr.c 	num_entries = min_t(u32, error->num_entries,
num_entries      2338 drivers/scsi/ipr.c 	for (i = 0; i < num_entries; i++, array_entry++) {
num_entries      2387 drivers/scsi/ipr.c 	for (i = 0, fabric = error->desc; i < error->num_entries; i++) {
num_entries      3063 drivers/scsi/ipr.c 	driver_dump->hdr.num_entries++;
num_entries      3084 drivers/scsi/ipr.c 	driver_dump->hdr.num_entries++;
num_entries      3105 drivers/scsi/ipr.c 	driver_dump->hdr.num_entries++;
num_entries      3126 drivers/scsi/ipr.c 	driver_dump->hdr.num_entries++;
num_entries      3143 drivers/scsi/ipr.c 	u32 num_entries, max_num_entries, start_off, end_off;
num_entries      3179 drivers/scsi/ipr.c 	driver_dump->hdr.num_entries = 1;
num_entries      3230 drivers/scsi/ipr.c 	num_entries = be32_to_cpu(sdt->hdr.num_entries_used);
num_entries      3232 drivers/scsi/ipr.c 	if (num_entries > max_num_entries)
num_entries      3233 drivers/scsi/ipr.c 		num_entries = max_num_entries;
num_entries      3238 drivers/scsi/ipr.c 		dump->driver_dump.hdr.len += num_entries * sizeof(struct ipr_sdt_entry);
num_entries      3244 drivers/scsi/ipr.c 	for (i = 0; i < num_entries; i++) {
num_entries      7443 drivers/scsi/ipr.c 	for (i = 0; i < mode_page->num_entries; i++) {
num_entries      7504 drivers/scsi/ipr.c 	     i < mode_page->num_entries;
num_entries      7812 drivers/scsi/ipr.c 		entries = be16_to_cpu(ioa_cfg->u.cfg_table64->hdr64.num_entries);
num_entries      7814 drivers/scsi/ipr.c 		entries = ioa_cfg->u.cfg_table->hdr.num_entries;
num_entries       439 drivers/scsi/ipr.h 	u8 num_entries;
num_entries       446 drivers/scsi/ipr.h 	__be16 num_entries;
num_entries       793 drivers/scsi/ipr.h 	u8 num_entries;
num_entries       994 drivers/scsi/ipr.h 	__be32 num_entries;
num_entries      1010 drivers/scsi/ipr.h 	u8 num_entries;
num_entries      1084 drivers/scsi/ipr.h 	__be16 num_entries;
num_entries      1098 drivers/scsi/ipr.h 	__be16 num_entries;
num_entries      1108 drivers/scsi/ipr.h 			cfg < ((fabric)->elem + be16_to_cpu((fabric)->num_entries)); \
num_entries      1114 drivers/scsi/ipr.h 	u8 num_entries;
num_entries      1121 drivers/scsi/ipr.h 	u8 num_entries;
num_entries      1255 drivers/scsi/ipr.h 	__be32 num_entries;
num_entries      1330 drivers/scsi/ipr.h 	u16 num_entries;
num_entries      1652 drivers/scsi/ipr.h 	u32 num_entries;
num_entries      3735 drivers/scsi/megaraid/megaraid_sas_fusion.c 	int num_entries;
num_entries      3745 drivers/scsi/megaraid/megaraid_sas_fusion.c 	num_entries = complete_cmd_fusion(instance, irq_ctx->MSIxIndex, irq_ctx);
num_entries      3746 drivers/scsi/megaraid/megaraid_sas_fusion.c 	if (num_entries < budget) {
num_entries      3752 drivers/scsi/megaraid/megaraid_sas_fusion.c 	return num_entries;
num_entries      1630 drivers/scsi/mpt3sas/mpt3sas_base.c 	int num_entries = 0;
num_entries      1638 drivers/scsi/mpt3sas/mpt3sas_base.c 	num_entries = _base_process_reply_queue(reply_q);
num_entries      1639 drivers/scsi/mpt3sas/mpt3sas_base.c 	if (num_entries < budget) {
num_entries      1646 drivers/scsi/mpt3sas/mpt3sas_base.c 	return num_entries;
num_entries      5509 drivers/scsi/pmcraid.c 	for (i = 0; i < le16_to_cpu(pinstance->cfg_table->num_entries); i++) {
num_entries       361 drivers/scsi/pmcraid.h 	__le16 num_entries;
num_entries        94 drivers/scsi/qla2xxx/qla_bsg.c 	for (i = 0; i < pri_cfg->num_entries; i++) {
num_entries      2069 drivers/scsi/qla2xxx/qla_fw.h 	uint16_t num_entries;   /* Number of entries                */
num_entries      8740 drivers/scsi/qla2xxx/qla_init.c 	entries = ha->fcp_prio_cfg->num_entries;
num_entries      1566 drivers/scsi/qla2xxx/qla_nx.c 	__le32 entries = cpu_to_le32(directory->num_entries);
num_entries      1862 drivers/scsi/qla2xxx/qla_nx.c 	entries = cpu_to_le32(ptab_desc->num_entries);
num_entries       804 drivers/scsi/qla2xxx/qla_nx.h 	uint32_t	num_entries;
num_entries      3620 drivers/scsi/qla2xxx/qla_sup.c 	len = ha->fcp_prio_cfg->num_entries * FCP_PRIO_CFG_ENTRY_SIZE;
num_entries       150 drivers/scsi/qla4xxx/ql4_os.c 				 uint32_t *num_entries, char *buf);
num_entries       736 drivers/scsi/qla4xxx/ql4_os.c 				  uint32_t *num_entries, char *buf)
num_entries       752 drivers/scsi/qla4xxx/ql4_os.c 			__func__, *num_entries, chap_tbl_idx);
num_entries       785 drivers/scsi/qla4xxx/ql4_os.c 		if (valid_chap_entries == *num_entries)
num_entries       795 drivers/scsi/qla4xxx/ql4_os.c 	*num_entries = valid_chap_entries;
num_entries      3041 drivers/scsi/scsi_transport_iscsi.c 	chap_buf_size = (ev->u.get_chap.num_entries * sizeof(*chap_rec));
num_entries      3069 drivers/scsi/scsi_transport_iscsi.c 		evchap->u.get_chap.num_entries = ev->u.get_chap.num_entries;
num_entries      3074 drivers/scsi/scsi_transport_iscsi.c 				    &evchap->u.get_chap.num_entries, buf);
num_entries       168 drivers/soc/qcom/smem.c 	__le32 num_entries;
num_entries       712 drivers/soc/qcom/smem.c 	info = (struct smem_info *)&ptable->entry[ptable->num_entries];
num_entries       784 drivers/soc/qcom/smem.c 	for (i = 0; i < le32_to_cpu(ptable->num_entries); i++) {
num_entries       830 drivers/soc/qcom/smem.c 	for (i = 0; i < le32_to_cpu(ptable->num_entries); i++) {
num_entries        81 drivers/soc/qcom/smsm.c 	u32 num_entries;
num_entries       428 drivers/soc/qcom/smsm.c 		u32 num_entries;
num_entries       440 drivers/soc/qcom/smsm.c 		smsm->num_entries = SMSM_DEFAULT_NUM_ENTRIES;
num_entries       445 drivers/soc/qcom/smsm.c 	smsm->num_entries = info->num_entries;
num_entries       450 drivers/soc/qcom/smsm.c 		smsm->num_entries, smsm->num_hosts);
num_entries       478 drivers/soc/qcom/smsm.c 				     smsm->num_entries,
num_entries       513 drivers/soc/qcom/smsm.c 			      smsm->num_entries * sizeof(u32));
num_entries       526 drivers/soc/qcom/smsm.c 	size = smsm->num_entries * smsm->num_hosts * sizeof(u32);
num_entries       556 drivers/soc/qcom/smsm.c 		if (ret || id >= smsm->num_entries) {
num_entries       581 drivers/soc/qcom/smsm.c 	for (id = 0; id < smsm->num_entries; id++)
num_entries       595 drivers/soc/qcom/smsm.c 	for (id = 0; id < smsm->num_entries; id++)
num_entries      1365 drivers/staging/comedi/drivers/cb_pcidas64.c 				      unsigned int num_entries)
num_entries      1374 drivers/staging/comedi/drivers/cb_pcidas64.c 	if (num_entries < increment_size)
num_entries      1375 drivers/staging/comedi/drivers/cb_pcidas64.c 		num_entries = increment_size;
num_entries      1376 drivers/staging/comedi/drivers/cb_pcidas64.c 	if (num_entries > fifo->max_segment_length)
num_entries      1377 drivers/staging/comedi/drivers/cb_pcidas64.c 		num_entries = fifo->max_segment_length;
num_entries      1380 drivers/staging/comedi/drivers/cb_pcidas64.c 	num_increments = DIV_ROUND_CLOSEST(num_entries, increment_size);
num_entries       549 drivers/staging/exfat/exfat.h 				  s32 entry, s32 num_entries,
num_entries       553 drivers/staging/exfat/exfat.h 				  struct uni_name_t *p_uniname, s32 num_entries,
num_entries       557 drivers/staging/exfat/exfat.h 				    s32 offset, s32 num_entries);
num_entries       652 drivers/staging/exfat/exfat.h 	u32 num_entries;
num_entries       850 drivers/staging/exfat/exfat.h 			   s32 entry, s32 num_entries,
num_entries       854 drivers/staging/exfat/exfat.h 			     s32 entry, s32 num_entries,
num_entries       865 drivers/staging/exfat/exfat.h 			  s32 entry, s32 order, s32 num_entries);
num_entries       867 drivers/staging/exfat/exfat.h 			    s32 entry, s32 order, s32 num_entries);
num_entries       885 drivers/staging/exfat/exfat.h 				   struct chain_t *p_dir, s32 num_entries);
num_entries       887 drivers/staging/exfat/exfat.h 		     s32 num_entries);
num_entries       889 drivers/staging/exfat/exfat.h 		       struct uni_name_t *p_uniname, s32 num_entries,
num_entries       892 drivers/staging/exfat/exfat.h 			 struct uni_name_t *p_uniname, s32 num_entries,
num_entries      1287 drivers/staging/exfat/exfat_core.c 			      s32 entry, s32 num_entries,
num_entries      1307 drivers/staging/exfat/exfat_core.c 	if ((--num_entries) > 0) {
num_entries      1311 drivers/staging/exfat/exfat_core.c 		for (i = 1; i < num_entries; i++) {
num_entries      1338 drivers/staging/exfat/exfat_core.c 				s32 entry, s32 num_entries,
num_entries      1354 drivers/staging/exfat/exfat_core.c 	file_ep->num_ext = (u8)(num_entries - 1);
num_entries      1366 drivers/staging/exfat/exfat_core.c 	for (i = 2; i < num_entries; i++) {
num_entries      1487 drivers/staging/exfat/exfat_core.c 		s32 entry, s32 order, s32 num_entries)
num_entries      1494 drivers/staging/exfat/exfat_core.c 	for (i = num_entries - 1; i >= order; i--) {
num_entries      1505 drivers/staging/exfat/exfat_core.c 		s32 entry, s32 order, s32 num_entries)
num_entries      1512 drivers/staging/exfat/exfat_core.c 	for (i = order; i < num_entries; i++) {
num_entries      1525 drivers/staging/exfat/exfat_core.c 	int i, num_entries;
num_entries      1538 drivers/staging/exfat/exfat_core.c 	num_entries = (s32)file_ep->num_ext + 1;
num_entries      1542 drivers/staging/exfat/exfat_core.c 	for (i = 1; i < num_entries; i++) {
num_entries      1566 drivers/staging/exfat/exfat_core.c 	for (i = 0; i < es->num_entries; i++) {
num_entries      1701 drivers/staging/exfat/exfat_core.c 	u8 num_entries;
num_entries      1735 drivers/staging/exfat/exfat_core.c 		num_entries = ((struct file_dentry_t *)ep)->num_ext + 1;
num_entries      1737 drivers/staging/exfat/exfat_core.c 		num_entries = type;
num_entries      1739 drivers/staging/exfat/exfat_core.c 	bufsize = offsetof(struct entry_set_cache_t, __buf) + (num_entries) *
num_entries      1742 drivers/staging/exfat/exfat_core.c 		 bufsize, num_entries);
num_entries      1747 drivers/staging/exfat/exfat_core.c 	es->num_entries = num_entries;
num_entries      1754 drivers/staging/exfat/exfat_core.c 	while (num_entries) {
num_entries      1805 drivers/staging/exfat/exfat_core.c 		if (--num_entries == 0)
num_entries      1839 drivers/staging/exfat/exfat_core.c 		   es->alloc_flag, es->num_entries, &es->__buf);
num_entries      1857 drivers/staging/exfat/exfat_core.c 	s32 num_entries, buf_off = (off - es->offset);
num_entries      1866 drivers/staging/exfat/exfat_core.c 	num_entries = count;
num_entries      1868 drivers/staging/exfat/exfat_core.c 	while (num_entries) {
num_entries      1873 drivers/staging/exfat/exfat_core.c 				     num_entries);
num_entries      1884 drivers/staging/exfat/exfat_core.c 		num_entries -= copy_entries;
num_entries      1886 drivers/staging/exfat/exfat_core.c 		if (num_entries) {
num_entries      1917 drivers/staging/exfat/exfat_core.c 						    es->num_entries);
num_entries      1932 drivers/staging/exfat/exfat_core.c 	if (ep + count  > ((struct dentry_t *)&(es->__buf)) + es->num_entries)
num_entries      1961 drivers/staging/exfat/exfat_core.c 				   struct chain_t *p_dir, s32 num_entries)
num_entries      2026 drivers/staging/exfat/exfat_core.c 			if (num_empty >= num_entries) {
num_entries      2031 drivers/staging/exfat/exfat_core.c 					return dentry - (num_entries - 1);
num_entries      2054 drivers/staging/exfat/exfat_core.c s32 find_empty_entry(struct inode *inode, struct chain_t *p_dir, s32 num_entries)
num_entries      2067 drivers/staging/exfat/exfat_core.c 		return search_deleted_or_unused_entry(sb, p_dir, num_entries);
num_entries      2069 drivers/staging/exfat/exfat_core.c 	while ((dentry = search_deleted_or_unused_entry(sb, p_dir, num_entries)) < 0) {
num_entries      2146 drivers/staging/exfat/exfat_core.c 		       struct uni_name_t *p_uniname, s32 num_entries,
num_entries      2244 drivers/staging/exfat/exfat_core.c 			 struct uni_name_t *p_uniname, s32 num_entries,
num_entries      2303 drivers/staging/exfat/exfat_core.c 					if ((num_empty >= num_entries) || (entry_type == TYPE_UNUSED))
num_entries      2567 drivers/staging/exfat/exfat_core.c 	s32 ret, num_entries;
num_entries      2572 drivers/staging/exfat/exfat_core.c 	num_entries = p_fs->fs_func->calc_num_entries(p_uniname);
num_entries      2573 drivers/staging/exfat/exfat_core.c 	if (num_entries == 0)
num_entries      2590 drivers/staging/exfat/exfat_core.c 				num_entries = 1;
num_entries      2593 drivers/staging/exfat/exfat_core.c 		if (num_entries > 1)
num_entries      2597 drivers/staging/exfat/exfat_core.c 	*entries = num_entries;
num_entries      2654 drivers/staging/exfat/exfat_core.c 	if (!es || es->num_entries < 3) {
num_entries      2668 drivers/staging/exfat/exfat_core.c 	for (i = 2; i < es->num_entries; i++, ep++) {
num_entries      3203 drivers/staging/exfat/exfat_core.c 	s32 ret, dentry, num_entries;
num_entries      3211 drivers/staging/exfat/exfat_core.c 	ret = get_num_entries_and_dos_name(sb, p_dir, p_uniname, &num_entries,
num_entries      3217 drivers/staging/exfat/exfat_core.c 	dentry = find_empty_entry(inode, p_dir, num_entries);
num_entries      3283 drivers/staging/exfat/exfat_core.c 	ret = fs_func->init_ext_entry(sb, p_dir, dentry, num_entries, p_uniname,
num_entries      3308 drivers/staging/exfat/exfat_core.c 	s32 ret, dentry, num_entries;
num_entries      3314 drivers/staging/exfat/exfat_core.c 	ret = get_num_entries_and_dos_name(sb, p_dir, p_uniname, &num_entries,
num_entries      3320 drivers/staging/exfat/exfat_core.c 	dentry = find_empty_entry(inode, p_dir, num_entries);
num_entries      3333 drivers/staging/exfat/exfat_core.c 	ret = fs_func->init_ext_entry(sb, p_dir, dentry, num_entries, p_uniname,
num_entries      3357 drivers/staging/exfat/exfat_core.c 	s32 num_entries;
num_entries      3371 drivers/staging/exfat/exfat_core.c 	num_entries = fs_func->count_ext_entries(sb, p_dir, entry, ep);
num_entries      3372 drivers/staging/exfat/exfat_core.c 	if (num_entries < 0) {
num_entries      3376 drivers/staging/exfat/exfat_core.c 	num_entries++;
num_entries      3381 drivers/staging/exfat/exfat_core.c 	fs_func->delete_dir_entry(sb, p_dir, entry, 0, num_entries);
num_entries       549 drivers/staging/exfat/exfat_super.c 	int ret, dentry, num_entries;
num_entries       572 drivers/staging/exfat/exfat_super.c 	ret = get_num_entries_and_dos_name(sb, &dir, &uni_name, &num_entries,
num_entries       578 drivers/staging/exfat/exfat_super.c 	dentry = p_fs->fs_func->find_dir_entry(sb, &dir, &uni_name, num_entries,
num_entries      1229 drivers/staging/exfat/exfat_super.c 	int num_entries;
num_entries      1323 drivers/staging/exfat/exfat_super.c 		num_entries = p_fs->fs_func->count_ext_entries(sb, p_dir,
num_entries      1325 drivers/staging/exfat/exfat_super.c 		if (num_entries < 0)
num_entries      1328 drivers/staging/exfat/exfat_super.c 						num_entries + 1);
num_entries       364 drivers/staging/fsl-dpaa2/ethsw/dpsw-cmd.h 	__le16 num_entries;
num_entries      1007 drivers/staging/fsl-dpaa2/ethsw/dpsw.c 		  u16 *num_entries)
num_entries      1029 drivers/staging/fsl-dpaa2/ethsw/dpsw.c 	*num_entries = le16_to_cpu(rsp_params->num_entries);
num_entries       491 drivers/staging/fsl-dpaa2/ethsw/dpsw.h 		  u16 *num_entries);
num_entries       303 drivers/staging/gasket/gasket_page_table.c 				     uint num_entries)
num_entries       307 drivers/staging/gasket/gasket_page_table.c 	for (i = 0; i < num_entries; i++) {
num_entries       909 drivers/staging/gasket/gasket_page_table.c 					 ulong dev_addr, uint num_entries)
num_entries       916 drivers/staging/gasket/gasket_page_table.c 	remain = num_entries;
num_entries      1814 drivers/target/sbp/sbp_target.c 	int num_luns, num_entries, idx = 0, mgt_agt_addr, ret;
num_entries      1839 drivers/target/sbp/sbp_target.c 	num_entries = ARRAY_SIZE(sbp_unit_directory_template) + 4 + num_luns;
num_entries      1842 drivers/target/sbp/sbp_target.c 		num_entries++;
num_entries      1845 drivers/target/sbp/sbp_target.c 	data = kcalloc((num_entries + 4), sizeof(u32), GFP_KERNEL);
num_entries      1850 drivers/target/sbp/sbp_target.c 	data[idx++] = num_entries << 16;
num_entries       513 drivers/tee/optee/call.c static size_t get_pages_list_size(size_t num_entries)
num_entries       515 drivers/tee/optee/call.c 	int pages = DIV_ROUND_UP(num_entries, PAGELIST_ENTRIES_PER_PAGE);
num_entries       520 drivers/tee/optee/call.c u64 *optee_allocate_pages_list(size_t num_entries)
num_entries       522 drivers/tee/optee/call.c 	return alloc_pages_exact(get_pages_list_size(num_entries), GFP_KERNEL);
num_entries       525 drivers/tee/optee/call.c void optee_free_pages_list(void *list, size_t num_entries)
num_entries       527 drivers/tee/optee/call.c 	free_pages_exact(list, get_pages_list_size(num_entries));
num_entries       120 drivers/tee/optee/optee_private.h 	size_t num_entries;
num_entries       171 drivers/tee/optee/optee_private.h u64 *optee_allocate_pages_list(size_t num_entries);
num_entries       172 drivers/tee/optee/optee_private.h void optee_free_pages_list(void *array, size_t num_entries);
num_entries       260 drivers/tee/optee/rpc.c 		call_ctx->num_entries = page_num;
num_entries       345 drivers/tee/optee/rpc.c 				      call_ctx->num_entries);
num_entries       347 drivers/tee/optee/rpc.c 		call_ctx->num_entries = 0;
num_entries      1813 drivers/usb/host/xhci-mem.c 	erst->num_entries = evt_ring->num_segs;
num_entries      1832 drivers/usb/host/xhci-mem.c 	size = sizeof(struct xhci_erst_entry) * (erst->num_entries);
num_entries      1629 drivers/usb/host/xhci.h 	unsigned int		num_entries;
num_entries       435 drivers/xen/xen-acpi-processor.c 			if (acpi_psd[i].num_entries) {
num_entries      1810 fs/btrfs/ctree.h 		   num_entries, 64);
num_entries        55 fs/btrfs/delayed-ref.c 	u64 num_entries =
num_entries        56 fs/btrfs/delayed-ref.c 		atomic_read(&trans->transaction->delayed_refs.num_entries);
num_entries        62 fs/btrfs/delayed-ref.c 	val = num_entries * avg_runtime;
num_entries       427 fs/btrfs/delayed-ref.c 	atomic_dec(&delayed_refs->num_entries);
num_entries       583 fs/btrfs/delayed-ref.c 	atomic_dec(&delayed_refs->num_entries);
num_entries       641 fs/btrfs/delayed-ref.c 	atomic_inc(&root->num_entries);
num_entries       844 fs/btrfs/delayed-ref.c 		atomic_inc(&delayed_refs->num_entries);
num_entries       151 fs/btrfs/delayed-ref.h 	atomic_t num_entries;
num_entries      4276 fs/btrfs/disk-io.c 	if (atomic_read(&delayed_refs->num_entries) == 0) {
num_entries      4301 fs/btrfs/disk-io.c 			atomic_dec(&delayed_refs->num_entries);
num_entries      1942 fs/btrfs/extent-tree.c 		atomic_dec(&delayed_refs->num_entries);
num_entries      2180 fs/btrfs/extent-tree.c 		count = atomic_read(&delayed_refs->num_entries) * 2;
num_entries       677 fs/btrfs/free-space-cache.c 	u64 num_entries;
num_entries       704 fs/btrfs/free-space-cache.c 	num_entries = btrfs_free_space_entries(leaf, header);
num_entries       723 fs/btrfs/free-space-cache.c 	if (!num_entries)
num_entries       744 fs/btrfs/free-space-cache.c 	while (num_entries) {
num_entries       795 fs/btrfs/free-space-cache.c 		num_entries--;
num_entries       207 fs/btrfs/raid56.c 	int num_entries = 1 << BTRFS_STRIPE_HASH_TABLE_BITS;
num_entries       221 fs/btrfs/raid56.c 	table_size = sizeof(*table) + sizeof(*h) * num_entries;
num_entries       231 fs/btrfs/raid56.c 	for (i = 0; i < num_entries; i++) {
num_entries       241 fs/btrfs/transaction.c 	atomic_set(&cur_trans->delayed_refs.num_entries, 0);
num_entries      2037 fs/ceph/mds_client.c 	int order, num_entries;
num_entries      2040 fs/ceph/mds_client.c 	num_entries = ci->i_files + ci->i_subdirs;
num_entries      2042 fs/ceph/mds_client.c 	num_entries = max(num_entries, 1);
num_entries      2043 fs/ceph/mds_client.c 	num_entries = min(num_entries, opt->max_readdir);
num_entries      2045 fs/ceph/mds_client.c 	order = get_order(size * num_entries);
num_entries      2057 fs/ceph/mds_client.c 	num_entries = (PAGE_SIZE << order) / size;
num_entries      2058 fs/ceph/mds_client.c 	num_entries = min(num_entries, opt->max_readdir);
num_entries      2061 fs/ceph/mds_client.c 	req->r_num_caps = num_entries + 1;
num_entries      2062 fs/ceph/mds_client.c 	req->r_args.readdir.max_entries = cpu_to_le32(num_entries);
num_entries      4330 fs/cifs/smb2pdu.c 			num_entries(srch_inf->srch_entries_start, end_of_smb,
num_entries      2079 fs/nfs/flexfilelayout/flexfilelayout.c 		unsigned int num_entries)
num_entries      2083 fs/nfs/flexfilelayout/flexfilelayout.c 	for (i = 0; i < num_entries; i++) {
num_entries      2289 fs/ocfs2/dir.c 				     int dx_inline, u32 num_entries,
num_entries      2336 fs/ocfs2/dir.c 	dx_root->dr_num_entries = cpu_to_le32(num_entries);
num_entries      1423 fs/ocfs2/xattr.c 				       int num_entries)
num_entries      1432 fs/ocfs2/xattr.c 		(num_entries * sizeof(struct ocfs2_xattr_entry)) -
num_entries        61 include/acpi/cppc_acpi.h 	int num_entries;
num_entries        99 include/acpi/processor.h 	u64 num_entries;
num_entries       140 include/acpi/processor.h 	u64 num_entries;
num_entries       326 include/drm/bridge/mhl.h 	u8 num_entries;
num_entries       335 include/drm/bridge/mhl.h 	u8 num_entries;
num_entries        27 include/drm/intel-gtt.h void intel_gtt_clear_range(unsigned int first_entry, unsigned int num_entries);
num_entries       952 include/linux/efi.h 	u32 num_entries;
num_entries      1059 include/linux/efi.h extern phys_addr_t __init efi_memmap_alloc(unsigned int num_entries);
num_entries        41 include/linux/mlx5/cmd.h 	u16	num_entries;
num_entries       473 include/linux/mlx5/qp.h 	__be16		num_entries;
num_entries      2894 include/linux/platform_data/cros_ec_commands.h 			uint32_t num_entries;
num_entries        89 include/linux/reset-controller.h 				 unsigned int num_entries);
num_entries       350 include/media/v4l2-subdev.h 	unsigned short num_entries;
num_entries       220 include/net/flow_offload.h 	unsigned int			num_entries;
num_entries       226 include/net/flow_offload.h 	return action->num_entries;
num_entries       237 include/net/flow_offload.h 	return action->num_entries == 1;
num_entries       241 include/net/flow_offload.h         for (__i = 0, __act = &(__actions)->entries[0]; __i < (__actions)->num_entries; __act = &(__actions)->entries[++__i])
num_entries       382 include/net/nfc/nci.h 	__u8				num_entries;
num_entries       183 include/net/pkt_sched.h 	size_t num_entries;
num_entries      2270 include/rdma/ib_verbs.h 	int (*poll_cq)(struct ib_cq *cq, int num_entries, struct ib_wc *wc);
num_entries      3874 include/rdma/ib_verbs.h static inline int ib_poll_cq(struct ib_cq *cq, int num_entries,
num_entries      3877 include/rdma/ib_verbs.h 	return cq->device->ops.poll_cq(cq, num_entries, wc);
num_entries       232 include/rdma/rdma_vt.h 	int num_entries;
num_entries       200 include/scsi/iscsi_if.h 			uint32_t	num_entries; /* number of CHAP entries
num_entries       140 include/scsi/scsi_transport_iscsi.h 			 uint32_t *num_entries, char *buf);
num_entries       450 include/uapi/linux/btrfs_tree.h 	__le64 num_entries;
num_entries       194 include/uapi/linux/i2o-dev.h 	__u16 num_entries;
num_entries       150 include/uapi/linux/netfilter_arp/arp_tables.h 	unsigned int num_entries;
num_entries       166 include/uapi/linux/netfilter_arp/arp_tables.h 	unsigned int num_entries;
num_entries       172 include/uapi/linux/netfilter_ipv4/ip_tables.h 	unsigned int num_entries;
num_entries       188 include/uapi/linux/netfilter_ipv4/ip_tables.h 	unsigned int num_entries;
num_entries       212 include/uapi/linux/netfilter_ipv6/ip6_tables.h 	unsigned int num_entries;
num_entries       228 include/uapi/linux/netfilter_ipv6/ip6_tables.h 	unsigned int num_entries;
num_entries       405 include/xen/interface/platform.h 	uint64_t num_entries;
num_entries       136 kernel/bpf/hashtab.c 	u32 num_entries = htab->map.max_entries;
num_entries       140 kernel/bpf/hashtab.c 		num_entries += num_possible_cpus();
num_entries       142 kernel/bpf/hashtab.c 	htab->elems = bpf_map_area_alloc(htab->elem_size * num_entries,
num_entries       150 kernel/bpf/hashtab.c 	for (i = 0; i < num_entries; i++) {
num_entries       179 kernel/bpf/hashtab.c 				 htab->elem_size, num_entries);
num_entries       183 kernel/bpf/hashtab.c 				       htab->elem_size, num_entries);
num_entries       338 net/batman-adv/translation-table.c 	atomic_add(v, &vlan->tt.num_entries);
num_entries       383 net/batman-adv/translation-table.c 	if (atomic_add_return(v, &vlan->tt.num_entries) == 0) {
num_entries       573 net/batman-adv/translation-table.c 		tt_local_entries += atomic_read(&vlan->tt.num_entries);
num_entries       857 net/batman-adv/translation-table.c 	u16 num_entries = 0;
num_entries       867 net/batman-adv/translation-table.c 		num_entries += atomic_read(&vlan->tt.num_entries);
num_entries       875 net/batman-adv/translation-table.c 		*tt_len = batadv_tt_len(num_entries);
num_entries       941 net/batman-adv/translation-table.c 		vlan_entries = atomic_read(&vlan->tt.num_entries);
num_entries       971 net/batman-adv/translation-table.c 		vlan_entries = atomic_read(&vlan->tt.num_entries);
num_entries      3525 net/batman-adv/translation-table.c 				  u16 num_entries)
num_entries      3537 net/batman-adv/translation-table.c 	_batadv_tt_update_changes(bat_priv, orig_node, tt_change, num_entries,
num_entries      3605 net/batman-adv/translation-table.c 				      u8 *resp_src, u16 num_entries)
num_entries      3616 net/batman-adv/translation-table.c 		   resp_src, tt_data->ttvn, num_entries,
num_entries      3633 net/batman-adv/translation-table.c 				      resp_src, num_entries);
num_entries      3635 net/batman-adv/translation-table.c 		batadv_tt_update_changes(bat_priv, orig_node, num_entries,
num_entries      4232 net/batman-adv/translation-table.c 	u16 num_entries, num_vlan;
num_entries      4249 net/batman-adv/translation-table.c 	num_entries = batadv_tt_entries(tvlv_value_len);
num_entries      4252 net/batman-adv/translation-table.c 			      num_entries, tt_data->ttvn);
num_entries       332 net/batman-adv/types.h 	atomic_t num_entries;
num_entries        69 net/core/drop_monitor.c 	u32 num_entries;
num_entries       355 net/core/drop_monitor.c 	for (i = 0; i < hw_entries->num_entries; i++) {
num_entries       450 net/core/drop_monitor.c 	for (i = 0; i < hw_entries->num_entries; i++) {
num_entries       458 net/core/drop_monitor.c 	if (WARN_ON_ONCE(hw_entries->num_entries == dm_hit_limit))
num_entries       461 net/core/drop_monitor.c 	hw_entry = &hw_entries->entries[hw_entries->num_entries];
num_entries       465 net/core/drop_monitor.c 	hw_entries->num_entries++;
num_entries        17 net/core/flow_offload.c 	rule->action.num_entries = num_actions;
num_entries       530 net/ipv4/netfilter/arp_tables.c 	newinfo->number = repl->num_entries;
num_entries       552 net/ipv4/netfilter/arp_tables.c 		if (i < repl->num_entries)
num_entries       561 net/ipv4/netfilter/arp_tables.c 	if (i != repl->num_entries)
num_entries       827 net/ipv4/netfilter/arp_tables.c 		info.num_entries = private->number;
num_entries      1054 net/ipv4/netfilter/arp_tables.c 	u32				num_entries;
num_entries      1173 net/ipv4/netfilter/arp_tables.c 	info->number = compatr->num_entries;
num_entries      1177 net/ipv4/netfilter/arp_tables.c 	ret = xt_compat_init_offsets(NFPROTO_ARP, compatr->num_entries);
num_entries      1191 net/ipv4/netfilter/arp_tables.c 	if (j != compatr->num_entries)
num_entries      1199 net/ipv4/netfilter/arp_tables.c 	newinfo->number = compatr->num_entries;
num_entries       671 net/ipv4/netfilter/ip_tables.c 	newinfo->number = repl->num_entries;
num_entries       692 net/ipv4/netfilter/ip_tables.c 		if (i < repl->num_entries)
num_entries       701 net/ipv4/netfilter/ip_tables.c 	if (i != repl->num_entries)
num_entries       984 net/ipv4/netfilter/ip_tables.c 		info.num_entries = private->number;
num_entries      1208 net/ipv4/netfilter/ip_tables.c 	u32			num_entries;
num_entries      1407 net/ipv4/netfilter/ip_tables.c 	info->number = compatr->num_entries;
num_entries      1411 net/ipv4/netfilter/ip_tables.c 	ret = xt_compat_init_offsets(AF_INET, compatr->num_entries);
num_entries      1425 net/ipv4/netfilter/ip_tables.c 	if (j != compatr->num_entries)
num_entries      1433 net/ipv4/netfilter/ip_tables.c 	newinfo->number = compatr->num_entries;
num_entries       688 net/ipv6/netfilter/ip6_tables.c 	newinfo->number = repl->num_entries;
num_entries       709 net/ipv6/netfilter/ip6_tables.c 		if (i < repl->num_entries)
num_entries       718 net/ipv6/netfilter/ip6_tables.c 	if (i != repl->num_entries)
num_entries      1000 net/ipv6/netfilter/ip6_tables.c 		info.num_entries = private->number;
num_entries      1224 net/ipv6/netfilter/ip6_tables.c 	u32			num_entries;
num_entries      1422 net/ipv6/netfilter/ip6_tables.c 	info->number = compatr->num_entries;
num_entries      1426 net/ipv6/netfilter/ip6_tables.c 	ret = xt_compat_init_offsets(AF_INET6, compatr->num_entries);
num_entries      1440 net/ipv6/netfilter/ip6_tables.c 	if (j != compatr->num_entries)
num_entries      1448 net/ipv6/netfilter/ip6_tables.c 	newinfo->number = compatr->num_entries;
num_entries        35 net/netfilter/xt_repldata.h 	tbl->repl.num_entries = nhooks + 1; \
num_entries        37 net/nfc/nci/ntf.c 	pr_debug("num_entries %d\n", ntf->num_entries);
num_entries        39 net/nfc/nci/ntf.c 	if (ntf->num_entries > NCI_MAX_NUM_CONN)
num_entries        40 net/nfc/nci/ntf.c 		ntf->num_entries = NCI_MAX_NUM_CONN;
num_entries        43 net/nfc/nci/ntf.c 	for (i = 0; i < ntf->num_entries; i++) {
num_entries        55 net/sched/sch_taprio.c 	size_t num_entries;
num_entries       860 net/sched/sch_taprio.c 	sched->num_entries = i;
num_entries      1109 net/sched/sch_taprio.c static struct tc_taprio_qopt_offload *taprio_offload_alloc(int num_entries)
num_entries      1111 net/sched/sch_taprio.c 	size_t size = sizeof(struct tc_taprio_sched_entry) * num_entries +
num_entries      1201 net/sched/sch_taprio.c 	offload->num_entries = i;
num_entries      1220 net/sched/sch_taprio.c 	offload = taprio_offload_alloc(sched->num_entries);
num_entries      1463 net/sched/sch_taprio.c 	if (new_admin->num_entries == 0) {
num_entries      1378 net/sctp/protocol.c 	int num_entries;
num_entries      1484 net/sctp/protocol.c 	num_entries = (1UL << order) * PAGE_SIZE /
num_entries      1492 net/sctp/protocol.c 	sctp_port_hashsize = rounddown_pow_of_two(num_entries);
num_entries      1504 net/sctp/protocol.c 		num_entries);
num_entries       168 scripts/sortextable.h 		int num_entries = _r(&extab_sec->sh_size) / extable_ent_size;
num_entries       169 scripts/sortextable.h 		qsort(extab_image, num_entries,
num_entries       645 sound/pci/hda/hda_local.h 			     int num_entries, const char * const *texts);
num_entries        86 sound/soc/intel/baytrail/sst-baytrail-ipc.c 	u16 num_entries;
num_entries       397 sound/soc/intel/baytrail/sst-baytrail-ipc.c 	stream->request.frame_info.num_entries = 1;