pd_idx             92 drivers/infiniband/hw/i40iw/i40iw_hmc.c 					     u32 *pd_idx,
pd_idx            100 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	*(pd_idx) = (u32)(fpm_adr / I40IW_HMC_PAGED_BP_SIZE);
pd_idx            300 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	u32 pd_idx = 0, pd_lmt = 0;
pd_idx            329 drivers/infiniband/hw/i40iw/i40iw_hmc.c 				  info->start_idx, info->count, &pd_idx, &pd_lmt);
pd_idx            343 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			pd_idx1 = max(pd_idx, (j * I40IW_HMC_MAX_BP_COUNT));
pd_idx            377 drivers/infiniband/hw/i40iw/i40iw_hmc.c 			pd_idx1 = max(pd_idx,
pd_idx            453 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	u32 pd_idx, pd_lmt, rel_pd_idx;
pd_idx            482 drivers/infiniband/hw/i40iw/i40iw_hmc.c 				  info->start_idx, info->count, &pd_idx, &pd_lmt);
pd_idx            484 drivers/infiniband/hw/i40iw/i40iw_hmc.c 	for (j = pd_idx; j < pd_lmt; j++) {
pd_idx             70 drivers/infiniband/hw/i40iw/i40iw_hmc.h #define I40IW_INVALIDATE_PF_HMC_PD(hw, sd_idx, pd_idx)                  \
pd_idx             74 drivers/infiniband/hw/i40iw/i40iw_hmc.h 		((pd_idx) << I40E_PFHMC_PDINV_PMPDIDX_SHIFT)))
pd_idx             83 drivers/infiniband/hw/i40iw/i40iw_hmc.h #define I40IW_INVALIDATE_VF_HMC_PD(hw, sd_idx, pd_idx, hmc_fn_id)        \
pd_idx             86 drivers/infiniband/hw/i40iw/i40iw_hmc.h 	      (pd_idx << I40E_PFHMC_PDINV_PMPDIDX_SHIFT)))
pd_idx            122 drivers/infiniband/hw/i40iw/i40iw_pble.c 	idx->pd_idx = (u32)(pble_rsrc->next_fpm_addr) / I40IW_HMC_PAGED_BP_SIZE;
pd_idx            123 drivers/infiniband/hw/i40iw/i40iw_pble.c 	idx->rel_pd_idx = (idx->pd_idx % I40IW_HMC_PD_CNT_IN_SD);
pd_idx            263 drivers/infiniband/hw/i40iw/i40iw_pble.c 	u32 pd_idx = info->idx.pd_idx;
pd_idx            291 drivers/infiniband/hw/i40iw/i40iw_pble.c 			status = i40iw_add_pd_table_entry(dev->hw, hmc_info, pd_idx++, &mem);
pd_idx             78 drivers/infiniband/hw/i40iw/i40iw_pble.h 	u32 pd_idx;
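
In the i40iw entries above, pd_idx is a page-descriptor index inside the HMC function-private memory (FPM) space: it is derived by dividing an FPM address by the backing-page size, and a relative index within the owning segment descriptor is taken modulo the per-SD page count (see i40iw_hmc.c:100 and i40iw_pble.c:122-123 above). A minimal sketch of that arithmetic follows; it is not the driver's code, and the constant values (4 KiB backing pages, 512 page descriptors per SD) are illustrative assumptions.

    /* Minimal sketch of the pd_idx arithmetic visible above.
     * Constants are assumed values for illustration only. */
    #include <stdint.h>
    #include <stdio.h>

    #define HMC_PAGED_BP_SIZE  4096u  /* assumed: bytes per backing page */
    #define HMC_PD_CNT_IN_SD    512u  /* assumed: page descriptors per SD */

    int main(void)
    {
            uint64_t fpm_addr = 0x123000;                   /* example FPM offset */
            uint32_t pd_idx   = (uint32_t)(fpm_addr / HMC_PAGED_BP_SIZE);
            uint32_t sd_idx   = pd_idx / HMC_PD_CNT_IN_SD;  /* owning segment descriptor */
            uint32_t rel_pd   = pd_idx % HMC_PD_CNT_IN_SD;  /* slot within that SD */

            printf("pd_idx=%u sd_idx=%u rel_pd_idx=%u\n", pd_idx, sd_idx, rel_pd);
            return 0;
    }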
pd_idx            323 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 	u16 pd_idx = 0;
pd_idx            330 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd_idx = _ocrdma_pd_mgr_get_bitmap(dev, true);
pd_idx            331 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd->id = dev->pd_mgr->pd_dpp_start + pd_idx;
pd_idx            332 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd->dpp_page = dev->pd_mgr->dpp_page_index + pd_idx;
pd_idx            335 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd_idx = _ocrdma_pd_mgr_get_bitmap(dev, false);
pd_idx            336 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd->id = dev->pd_mgr->pd_norm_start + pd_idx;
pd_idx            343 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd_idx = _ocrdma_pd_mgr_get_bitmap(dev, false);
pd_idx            344 drivers/infiniband/hw/ocrdma/ocrdma_verbs.c 			pd->id = dev->pd_mgr->pd_norm_start + pd_idx;
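
In the ocrdma entries above, pd_idx is a protection-domain slot returned by a bitmap allocator, and the final PD id is that slot offset from a per-pool base (pd_norm_start or pd_dpp_start). Below is a minimal sketch of that allocation pattern, with invented names and a toy 64-entry pool; it is not the ocrdma implementation.

    /* Minimal sketch of bitmap-slot PD allocation as seen above.
     * Struct and helper names are invented stand-ins. */
    #include <stdint.h>
    #include <stdio.h>

    #define PD_POOL_SIZE 64

    struct pd_pool {
            uint64_t bitmap;   /* one bit per PD slot (toy pool) */
            uint16_t base_id;  /* analogue of pd_norm_start / pd_dpp_start */
    };

    /* Claim the first free slot; return -1 when the pool is full. */
    static int pd_pool_get(struct pd_pool *pool)
    {
            for (int i = 0; i < PD_POOL_SIZE; i++) {
                    if (!(pool->bitmap & (1ULL << i))) {
                            pool->bitmap |= 1ULL << i;
                            return i;
                    }
            }
            return -1;
    }

    int main(void)
    {
            struct pd_pool norm = { .bitmap = 0, .base_id = 0x100 };
            int pd_idx = pd_pool_get(&norm);

            if (pd_idx >= 0)
                    printf("pd id = 0x%x\n", (unsigned)(norm.base_id + pd_idx));
            return 0;
    }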
pd_idx            506 drivers/md/raid5-cache.c 		set_bit(R5_InJournal, &sh->dev[sh->pd_idx].flags);
pd_idx            511 drivers/md/raid5-cache.c 		set_bit(R5_InJournal, &sh->dev[sh->pd_idx].flags);
pd_idx            931 drivers/md/raid5-cache.c 		if (i == sh->pd_idx || i == sh->qd_idx)
pd_idx            950 drivers/md/raid5-cache.c 					sh->sector, sh->dev[sh->pd_idx].log_checksum,
pd_idx            952 drivers/md/raid5-cache.c 		r5l_append_payload_page(log, sh->dev[sh->pd_idx].page);
pd_idx            956 drivers/md/raid5-cache.c 					sh->sector, sh->dev[sh->pd_idx].log_checksum,
pd_idx            958 drivers/md/raid5-cache.c 		r5l_append_payload_page(log, sh->dev[sh->pd_idx].page);
pd_idx           1007 drivers/md/raid5-cache.c 	if (sh->log_io || !test_bit(R5_Wantwrite, &sh->dev[sh->pd_idx].flags) ||
pd_idx           1844 drivers/md/raid5-cache.c 	r5l_recovery_read_page(log, ctx, sh->dev[sh->pd_idx].page, log_offset);
pd_idx           1845 drivers/md/raid5-cache.c 	sh->dev[sh->pd_idx].log_checksum =
pd_idx           1847 drivers/md/raid5-cache.c 	set_bit(R5_Wantwrite, &sh->dev[sh->pd_idx].flags);
pd_idx           1882 drivers/md/raid5-cache.c 		if (disk_index == sh->qd_idx || disk_index == sh->pd_idx)
pd_idx           2185 drivers/md/raid5-cache.c 			    test_bit(R5_Wantwrite, &sh->dev[sh->pd_idx].flags)) {
pd_idx           2813 drivers/md/raid5-cache.c 	if (!log || !test_bit(R5_InJournal, &sh->dev[sh->pd_idx].flags))
pd_idx           2817 drivers/md/raid5-cache.c 	clear_bit(R5_InJournal, &sh->dev[sh->pd_idx].flags);
pd_idx            163 drivers/md/raid5-ppl.c 	int count = 0, pd_idx = sh->pd_idx, i;
pd_idx            180 drivers/md/raid5-ppl.c 		srcs[count++] = sh->dev[pd_idx].page;
pd_idx            301 drivers/md/raid5-ppl.c 		if (i != sh->pd_idx && test_bit(R5_Wantwrite, &dev->flags)) {
pd_idx            338 drivers/md/raid5-ppl.c 		e->parity_disk = cpu_to_le32(sh->pd_idx);
pd_idx            367 drivers/md/raid5-ppl.c 	    !test_bit(R5_Wantwrite, &sh->dev[sh->pd_idx].flags) ||
pd_idx            368 drivers/md/raid5-ppl.c 	    !test_bit(R5_Insync, &sh->dev[sh->pd_idx].flags)) {
pd_idx            373 drivers/md/raid5-ppl.c 	log = &ppl_conf->child_logs[sh->pd_idx];
pd_idx            945 drivers/md/raid5-ppl.c 		BUG_ON(sh.pd_idx != le32_to_cpu(e->parity_disk));
pd_idx            946 drivers/md/raid5-ppl.c 		parity_rdev = conf->disks[sh.pd_idx].rdev;
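
In the raid5-cache.c and raid5-ppl.c entries above, sh->pd_idx (and qd_idx for RAID6) name the parity slots in the stripe head's dev[] array: loops over data blocks skip those indexes, while journal/PPL code addresses the parity page through them directly. A minimal sketch of that skip pattern follows; the struct is a simplified stand-in, not the kernel's stripe_head.

    /* Minimal sketch of the "skip the parity slots" pattern above. */
    #include <stdio.h>

    struct toy_stripe {
            int disks;
            int pd_idx;   /* parity disk index */
            int qd_idx;   /* Q-parity index, -1 for RAID5 */
    };

    int main(void)
    {
            struct toy_stripe sh = { .disks = 5, .pd_idx = 3, .qd_idx = -1 };

            for (int i = 0; i < sh.disks; i++) {
                    if (i == sh.pd_idx || i == sh.qd_idx)
                            continue;   /* parity handled separately */
                    printf("dev[%d] is a data block\n", i);
            }
            printf("dev[%d] is the parity block\n", sh.pd_idx);
            return 0;
    }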
pd_idx            141 drivers/md/raid5.c 	if (idx == sh->pd_idx)
pd_idx            274 drivers/md/raid5.c 				WARN_ON(test_bit(R5_InJournal, &sh->dev[sh->pd_idx].flags));
pd_idx            793 drivers/md/raid5.c 	while (dd_idx == sh->pd_idx || dd_idx == sh->qd_idx)
pd_idx           1467 drivers/md/raid5.c 		if (i == sh->qd_idx || i == sh->pd_idx ||
pd_idx           1673 drivers/md/raid5.c 	int count = 0, pd_idx = sh->pd_idx, i;
pd_idx           1677 drivers/md/raid5.c 	struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;
pd_idx           1794 drivers/md/raid5.c 	int pd_idx = sh->pd_idx;
pd_idx           1811 drivers/md/raid5.c 		if (dev->written || i == pd_idx || i == qd_idx) {
pd_idx           1844 drivers/md/raid5.c 	int count, pd_idx = sh->pd_idx, i;
pd_idx           1856 drivers/md/raid5.c 		if (pd_idx == i)
pd_idx           1863 drivers/md/raid5.c 		set_bit(R5_Discard, &sh->dev[pd_idx].flags);
pd_idx           1875 drivers/md/raid5.c 		xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;
pd_idx           1883 drivers/md/raid5.c 		xor_dest = sh->dev[pd_idx].page;
pd_idx           1886 drivers/md/raid5.c 			if (i != pd_idx)
pd_idx           1939 drivers/md/raid5.c 		if (sh->pd_idx == i || sh->qd_idx == i)
pd_idx           1946 drivers/md/raid5.c 		set_bit(R5_Discard, &sh->dev[sh->pd_idx].flags);
pd_idx           1999 drivers/md/raid5.c 	int pd_idx = sh->pd_idx;
pd_idx           2013 drivers/md/raid5.c 	xor_dest = sh->dev[pd_idx].page;
pd_idx           2016 drivers/md/raid5.c 		if (i == pd_idx || i == qd_idx)
pd_idx           2568 drivers/md/raid5.c 			if (sh->qd_idx >= 0 && sh->pd_idx == i)
pd_idx           2720 drivers/md/raid5.c 	int pd_idx, qd_idx;
pd_idx           2748 drivers/md/raid5.c 	pd_idx = qd_idx = -1;
pd_idx           2751 drivers/md/raid5.c 		pd_idx = data_disks;
pd_idx           2756 drivers/md/raid5.c 			pd_idx = data_disks - sector_div(stripe2, raid_disks);
pd_idx           2757 drivers/md/raid5.c 			if (*dd_idx >= pd_idx)
pd_idx           2761 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks);
pd_idx           2762 drivers/md/raid5.c 			if (*dd_idx >= pd_idx)
pd_idx           2766 drivers/md/raid5.c 			pd_idx = data_disks - sector_div(stripe2, raid_disks);
pd_idx           2767 drivers/md/raid5.c 			*dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks;
pd_idx           2770 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks);
pd_idx           2771 drivers/md/raid5.c 			*dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks;
pd_idx           2774 drivers/md/raid5.c 			pd_idx = 0;
pd_idx           2778 drivers/md/raid5.c 			pd_idx = data_disks;
pd_idx           2788 drivers/md/raid5.c 			pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks);
pd_idx           2789 drivers/md/raid5.c 			qd_idx = pd_idx + 1;
pd_idx           2790 drivers/md/raid5.c 			if (pd_idx == raid_disks-1) {
pd_idx           2793 drivers/md/raid5.c 			} else if (*dd_idx >= pd_idx)
pd_idx           2797 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks);
pd_idx           2798 drivers/md/raid5.c 			qd_idx = pd_idx + 1;
pd_idx           2799 drivers/md/raid5.c 			if (pd_idx == raid_disks-1) {
pd_idx           2802 drivers/md/raid5.c 			} else if (*dd_idx >= pd_idx)
pd_idx           2806 drivers/md/raid5.c 			pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks);
pd_idx           2807 drivers/md/raid5.c 			qd_idx = (pd_idx + 1) % raid_disks;
pd_idx           2808 drivers/md/raid5.c 			*dd_idx = (pd_idx + 2 + *dd_idx) % raid_disks;
pd_idx           2811 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks);
pd_idx           2812 drivers/md/raid5.c 			qd_idx = (pd_idx + 1) % raid_disks;
pd_idx           2813 drivers/md/raid5.c 			*dd_idx = (pd_idx + 2 + *dd_idx) % raid_disks;
pd_idx           2817 drivers/md/raid5.c 			pd_idx = 0;
pd_idx           2822 drivers/md/raid5.c 			pd_idx = data_disks;
pd_idx           2830 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks);
pd_idx           2831 drivers/md/raid5.c 			qd_idx = pd_idx + 1;
pd_idx           2832 drivers/md/raid5.c 			if (pd_idx == raid_disks-1) {
pd_idx           2835 drivers/md/raid5.c 			} else if (*dd_idx >= pd_idx)
pd_idx           2846 drivers/md/raid5.c 			pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks);
pd_idx           2847 drivers/md/raid5.c 			qd_idx = pd_idx + 1;
pd_idx           2848 drivers/md/raid5.c 			if (pd_idx == raid_disks-1) {
pd_idx           2851 drivers/md/raid5.c 			} else if (*dd_idx >= pd_idx)
pd_idx           2858 drivers/md/raid5.c 			pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks);
pd_idx           2859 drivers/md/raid5.c 			qd_idx = (pd_idx + raid_disks - 1) % raid_disks;
pd_idx           2860 drivers/md/raid5.c 			*dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks;
pd_idx           2866 drivers/md/raid5.c 			pd_idx = data_disks - sector_div(stripe2, raid_disks-1);
pd_idx           2867 drivers/md/raid5.c 			if (*dd_idx >= pd_idx)
pd_idx           2873 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks-1);
pd_idx           2874 drivers/md/raid5.c 			if (*dd_idx >= pd_idx)
pd_idx           2880 drivers/md/raid5.c 			pd_idx = data_disks - sector_div(stripe2, raid_disks-1);
pd_idx           2881 drivers/md/raid5.c 			*dd_idx = (pd_idx + 1 + *dd_idx) % (raid_disks-1);
pd_idx           2886 drivers/md/raid5.c 			pd_idx = sector_div(stripe2, raid_disks-1);
pd_idx           2887 drivers/md/raid5.c 			*dd_idx = (pd_idx + 1 + *dd_idx) % (raid_disks-1);
pd_idx           2892 drivers/md/raid5.c 			pd_idx = 0;
pd_idx           2904 drivers/md/raid5.c 		sh->pd_idx = pd_idx;
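
The raid5.c:2720-2904 entries above are the layout computation that picks pd_idx (and qd_idx) from the stripe number according to the configured algorithm. As one grounded example, the left-symmetric RAID5 branch (raid5.c:2766-2767 above) rotates parity backwards with the stripe number and numbers data slots starting just after it. The sketch below illustrates that single branch only; it is not a drop-in replacement for raid5_compute_sector().

    /* Minimal sketch of the left-symmetric layout formula above. */
    #include <stdio.h>

    int main(void)
    {
            int raid_disks = 5;
            int data_disks = raid_disks - 1;

            for (long stripe = 0; stripe < 5; stripe++) {
                    int pd_idx = data_disks - (int)(stripe % raid_disks);

                    printf("stripe %ld: parity on disk %d, data chunk 0 on disk %d\n",
                           stripe, pd_idx, (pd_idx + 1) % raid_disks);
            }
            return 0;
    }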
pd_idx           2935 drivers/md/raid5.c 	if (i == sh->pd_idx)
pd_idx           2943 drivers/md/raid5.c 			if (i > sh->pd_idx)
pd_idx           2948 drivers/md/raid5.c 			if (i < sh->pd_idx)
pd_idx           2950 drivers/md/raid5.c 			i -= (sh->pd_idx + 1);
pd_idx           2969 drivers/md/raid5.c 			if (sh->pd_idx == raid_disks-1)
pd_idx           2971 drivers/md/raid5.c 			else if (i > sh->pd_idx)
pd_idx           2976 drivers/md/raid5.c 			if (sh->pd_idx == raid_disks-1)
pd_idx           2980 drivers/md/raid5.c 				if (i < sh->pd_idx)
pd_idx           2982 drivers/md/raid5.c 				i -= (sh->pd_idx + 2);
pd_idx           2992 drivers/md/raid5.c 			if (sh->pd_idx == 0)
pd_idx           2996 drivers/md/raid5.c 				if (i < sh->pd_idx)
pd_idx           2998 drivers/md/raid5.c 				i -= (sh->pd_idx + 1);
pd_idx           3003 drivers/md/raid5.c 			if (i > sh->pd_idx)
pd_idx           3008 drivers/md/raid5.c 			if (i < sh->pd_idx)
pd_idx           3010 drivers/md/raid5.c 			i -= (sh->pd_idx + 1);
pd_idx           3026 drivers/md/raid5.c 	if (check != sh->sector || dummy1 != dd_idx || sh2.pd_idx != sh->pd_idx
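
The raid5.c:2935-3026 entries above are the inverse mapping: given a device index and the stripe's pd_idx, recover which data chunk that device holds (with a final consistency check at raid5.c:3026). For the symmetric layouts this is "distance past the parity disk, wrapping around", as in raid5.c:3008-3010 above. A purely illustrative sketch:

    /* Minimal sketch of the inverse (disk -> data index) mapping above. */
    #include <stdio.h>

    int main(void)
    {
            int raid_disks = 5, pd_idx = 3;

            for (int i = 0; i < raid_disks; i++) {
                    if (i == pd_idx) {
                            printf("disk %d holds parity\n", i);
                            continue;
                    }

                    int dd_idx = i;
                    if (dd_idx < pd_idx)
                            dd_idx += raid_disks;
                    dd_idx -= pd_idx + 1;   /* data slots start after parity */

                    printf("disk %d holds data chunk %d\n", i, dd_idx);
            }
            return 0;
    }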
pd_idx           3095 drivers/md/raid5.c 	int i, pd_idx = sh->pd_idx, qd_idx = sh->qd_idx, disks = sh->disks;
pd_idx           3141 drivers/md/raid5.c 		BUG_ON(!(test_bit(R5_UPTODATE, &sh->dev[pd_idx].flags) ||
pd_idx           3142 drivers/md/raid5.c 			test_bit(R5_Wantcompute, &sh->dev[pd_idx].flags)));
pd_idx           3149 drivers/md/raid5.c 			if (i == pd_idx || i == qd_idx)
pd_idx           3176 drivers/md/raid5.c 	set_bit(R5_LOCKED, &sh->dev[pd_idx].flags);
pd_idx           3177 drivers/md/raid5.c 	clear_bit(R5_UPTODATE, &sh->dev[pd_idx].flags);
pd_idx           3192 drivers/md/raid5.c 	    test_bit(R5_Insync, &sh->dev[pd_idx].flags))
pd_idx           3250 drivers/md/raid5.c 			if (i != sh->pd_idx &&
pd_idx           3470 drivers/md/raid5.c 	if (test_and_clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags))
pd_idx           3607 drivers/md/raid5.c 		if (s->failed_num[i] != sh->pd_idx &&
pd_idx           3647 drivers/md/raid5.c 		    ((sh->qd_idx >= 0 && sh->pd_idx == disk_idx) ||
pd_idx           3809 drivers/md/raid5.c 	    test_bit(R5_Discard, &sh->dev[sh->pd_idx].flags)) {
pd_idx           3811 drivers/md/raid5.c 		clear_bit(R5_Discard, &sh->dev[sh->pd_idx].flags);
pd_idx           3812 drivers/md/raid5.c 		clear_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags);
pd_idx           3894 drivers/md/raid5.c 		     i == sh->pd_idx || i == sh->qd_idx ||
pd_idx           3906 drivers/md/raid5.c 		    i != sh->pd_idx && i != sh->qd_idx &&
pd_idx           3930 drivers/md/raid5.c 			    !test_bit(R5_LOCKED, &sh->dev[sh->pd_idx].flags)) {
pd_idx           3959 drivers/md/raid5.c 			     i == sh->pd_idx || i == sh->qd_idx ||
pd_idx           3986 drivers/md/raid5.c 			    i != sh->pd_idx && i != sh->qd_idx &&
pd_idx           4048 drivers/md/raid5.c 			clear_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags);
pd_idx           4057 drivers/md/raid5.c 			dev = &sh->dev[sh->pd_idx];
pd_idx           4109 drivers/md/raid5.c 					&sh->dev[sh->pd_idx].flags);
pd_idx           4110 drivers/md/raid5.c 				sh->ops.target = sh->pd_idx;
pd_idx           4130 drivers/md/raid5.c 	int pd_idx = sh->pd_idx;
pd_idx           4170 drivers/md/raid5.c 			clear_bit(R5_UPTODATE, &sh->dev[pd_idx].flags);
pd_idx           4209 drivers/md/raid5.c 			dev = &sh->dev[pd_idx];
pd_idx           4279 drivers/md/raid5.c 						&sh->dev[pd_idx].flags);
pd_idx           4280 drivers/md/raid5.c 					*target = pd_idx;
pd_idx           4314 drivers/md/raid5.c 		if (i != sh->pd_idx && i != sh->qd_idx) {
pd_idx           4345 drivers/md/raid5.c 				if (j != sh2->pd_idx &&
pd_idx           4714 drivers/md/raid5.c 	       atomic_read(&sh->count), sh->pd_idx, sh->qd_idx,
pd_idx           4779 drivers/md/raid5.c 		BUG_ON(!test_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags) &&
pd_idx           4780 drivers/md/raid5.c 		       !test_bit(R5_Discard, &sh->dev[sh->pd_idx].flags));
pd_idx           4787 drivers/md/raid5.c 				(i == sh->pd_idx || i == sh->qd_idx ||
pd_idx           4797 drivers/md/raid5.c 				    ((i == sh->pd_idx || i == sh->qd_idx)  &&
pd_idx           4810 drivers/md/raid5.c 	pdev = &sh->dev[sh->pd_idx];
pd_idx           4811 drivers/md/raid5.c 	s.p_failed = (s.failed >= 1 && s.failed_num[0] == sh->pd_idx)
pd_idx           4812 drivers/md/raid5.c 		|| (s.failed >= 2 && s.failed_num[1] == sh->pd_idx);
pd_idx           4925 drivers/md/raid5.c 		if (test_and_clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags))
pd_idx           5523 drivers/md/raid5.c 		set_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags);
pd_idx           5529 drivers/md/raid5.c 		clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags);
pd_idx           5532 drivers/md/raid5.c 			if (d == sh->pd_idx || d == sh->qd_idx)
pd_idx           5546 drivers/md/raid5.c 			if (d == sh->pd_idx || d == sh->qd_idx)
pd_idx           5924 drivers/md/raid5.c 			if (j == sh->pd_idx)
pd_idx            206 drivers/md/raid5.h 	short			pd_idx;		/* parity disk index */
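
raid5.h:206 above is the defining use: pd_idx is the parity disk index stored in struct stripe_head. One further pattern visible in the raid5.c entries (raid5.c:4811-4812 above) is deciding whether the parity device is among the failed devices by comparing recorded failure indexes against pd_idx. A minimal sketch with simplified stand-in fields:

    /* Minimal sketch of the "is parity among the failed devices" check. */
    #include <stdbool.h>
    #include <stdio.h>

    int main(void)
    {
            int failed = 2;
            int failed_num[2] = { 1, 3 };
            int pd_idx = 3;

            bool p_failed = (failed >= 1 && failed_num[0] == pd_idx) ||
                            (failed >= 2 && failed_num[1] == pd_idx);

            printf("parity device failed: %s\n", p_failed ? "yes" : "no");
            return 0;
    }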
pd_idx            139 drivers/net/ethernet/intel/i40e/i40e_hmc.h #define I40E_INVALIDATE_PF_HMC_PD(hw, sd_idx, pd_idx)			\
pd_idx            142 drivers/net/ethernet/intel/i40e/i40e_hmc.h 	     ((pd_idx) << I40E_PFHMC_PDINV_PMPDIDX_SHIFT)))
pd_idx            285 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 	u32 pd_idx = 0, pd_lmt = 0;
pd_idx            332 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 				 info->start_idx, info->count, &pd_idx,
pd_idx            361 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 			pd_idx1 = max(pd_idx, (j * I40E_HMC_MAX_BP_COUNT));
pd_idx            409 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 			pd_idx1 = max(pd_idx,
pd_idx            528 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 	u32 pd_idx, pd_lmt, rel_pd_idx;
pd_idx            575 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 				 info->start_idx, info->count, &pd_idx,
pd_idx            578 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 	for (j = pd_idx; j < pd_lmt; j++) {
pd_idx            984 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 	u32 pd_idx, pd_lmt, rel_pd_idx;
pd_idx           1025 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 					 &pd_idx, &pd_lmt);
pd_idx           1026 drivers/net/ethernet/intel/i40e/i40e_lan_hmc.c 		rel_pd_idx = pd_idx % I40E_HMC_PD_CNT_IN_SD;
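
The i40e entries mirror the i40iw HMC usage sketched earlier: pd_idx/pd_lmt bound a range of page descriptors, and each index is split into its owning segment descriptor and a relative slot (i40e_lan_hmc.c:578 and 1026 above). A minimal sketch of that loop pattern; the per-SD count is an assumed value for illustration.

    /* Minimal sketch of walking a [pd_idx, pd_lmt) range as seen above. */
    #include <stdint.h>
    #include <stdio.h>

    #define HMC_PD_CNT_IN_SD 512u   /* assumed page descriptors per SD */

    int main(void)
    {
            uint32_t pd_idx = 510, pd_lmt = 515;   /* example range crossing an SD */

            for (uint32_t j = pd_idx; j < pd_lmt; j++) {
                    uint32_t sd_idx     = j / HMC_PD_CNT_IN_SD;
                    uint32_t rel_pd_idx = j % HMC_PD_CNT_IN_SD;

                    printf("pd %u -> sd %u, slot %u\n", j, sd_idx, rel_pd_idx);
            }
            return 0;
    }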