Lines matching refs: pd_idx (drivers/md/raid5.c)
pd_idx is the stripe_head slot that holds the P (XOR) parity block of a stripe; qd_idx, which appears alongside it in many of the hits below, is the Q-parity slot used by the RAID6 layouts (both are initialised to -1 when absent, as line 2569 shows).

217 	if (idx == sh->pd_idx)  in raid6_idx_to_slot()
809 while (dd_idx == sh->pd_idx || dd_idx == sh->qd_idx) in stripe_add_to_batch_list()
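The single stripe_add_to_batch_list() hit above is the idiom for finding the first data slot of a stripe: start at 0 and step past whichever slots hold parity. A minimal, illustrative sketch of that loop (standalone C, not the kernel's stripe_head machinery):

    /* Sketch: return the lowest device slot that carries data rather
     * than parity. pd_idx is the P slot; qd_idx is the Q slot on RAID6
     * layouts (pass -1 when there is no Q). */
    static int first_data_slot(int pd_idx, int qd_idx)
    {
            int dd_idx = 0;

            while (dd_idx == pd_idx || dd_idx == qd_idx)
                    dd_idx++;
            return dd_idx;
    }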
1356 if (i == sh->qd_idx || i == sh->pd_idx || in set_syndrome_sources()
1549 int count = 0, pd_idx = sh->pd_idx, i; in ops_run_prexor5() local
1553 struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page; in ops_run_prexor5()
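ops_run_prexor5() seeds the XOR destination with the current parity page (line 1553 above) and XORs the old contents of the blocks about to be rewritten into it; once the new data is XORed in by the later reconstruct step, the net effect is the read-modify-write identity P_new = P_old ^ D_old ^ D_new. A buffer-level sketch of that identity, in plain C rather than the async_tx descriptors the driver actually submits:

    #include <stddef.h>
    #include <stdint.h>

    /* Sketch: fold one rewritten block into the parity buffer in place.
     * parity becomes the new parity; old_data/new_data are the block's
     * contents before and after the write. */
    static void rmw_parity_update(uint8_t *parity, const uint8_t *old_data,
                                  const uint8_t *new_data, size_t len)
    {
            size_t i;

            for (i = 0; i < len; i++)
                    parity[i] ^= old_data[i] ^ new_data[i];
    }

The appeal of this path is that only the rewritten blocks and the old parity have to be read, instead of every remaining data block in the stripe.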
1660 int pd_idx = sh->pd_idx; in ops_complete_reconstruct() local
1677 if (dev->written || i == pd_idx || i == qd_idx) { in ops_complete_reconstruct()
1707 int count, pd_idx = sh->pd_idx, i; in ops_run_reconstruct5() local
1719 if (pd_idx == i) in ops_run_reconstruct5()
1726 set_bit(R5_Discard, &sh->dev[pd_idx].flags); in ops_run_reconstruct5()
1738 xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page; in ops_run_reconstruct5()
1745 xor_dest = sh->dev[pd_idx].page; in ops_run_reconstruct5()
1748 if (i != pd_idx) in ops_run_reconstruct5()
1801 if (sh->pd_idx == i || sh->qd_idx == i) in ops_run_reconstruct6()
1808 set_bit(R5_Discard, &sh->dev[sh->pd_idx].flags); in ops_run_reconstruct6()
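ops_run_reconstruct6() regenerates both parities of a stripe: P is the plain XOR of the data blocks, and Q is the RAID6 syndrome, the GF(2^8) sum of g^d * D_d with generator g = 2 over polynomial 0x11d. A self-contained, byte-at-a-time sketch of that computation (the kernel uses table-driven and SIMD code instead):

    #include <stddef.h>
    #include <stdint.h>

    /* Multiply by 2 in GF(2^8) with the RAID6 polynomial 0x11d. */
    static uint8_t gf_mul2(uint8_t v)
    {
            return (uint8_t)((v << 1) ^ ((v & 0x80) ? 0x1d : 0));
    }

    /* Sketch: compute P and Q over ndisks data buffers of len bytes.
     * Walking the disks from highest index to lowest lets Q accumulate
     * g^d * D_d by repeated multiply-by-2 (Horner's rule). */
    static void compute_pq(uint8_t *const *data, int ndisks, size_t len,
                           uint8_t *p, uint8_t *q)
    {
            size_t b;
            int d;

            for (b = 0; b < len; b++) {
                    uint8_t pv = 0, qv = 0;

                    for (d = ndisks - 1; d >= 0; d--) {
                            pv ^= data[d][b];
                            qv = (uint8_t)(gf_mul2(qv) ^ data[d][b]);
                    }
                    p[b] = pv;
                    q[b] = qv;
            }
    }

Because P and Q are independent equations over the data blocks, any two missing members of the stripe (two data blocks, a data block plus one parity, or both parities) can be solved for, which is what lets RAID6 survive two device failures.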
1861 int pd_idx = sh->pd_idx; in ops_run_check_p() local
1875 xor_dest = sh->dev[pd_idx].page; in ops_run_check_p()
1878 if (i == pd_idx || i == qd_idx) in ops_run_check_p()
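ops_run_check_p() verifies P by XOR-summing the parity page (the xor_dest at line 1875) with every data page, skipping the pd_idx/qd_idx slots when gathering sources; a non-zero result means the stripe is inconsistent. A standalone sketch of the same check:

    #include <stdbool.h>
    #include <stddef.h>
    #include <stdint.h>

    /* Sketch: true when XORing the parity block with every data block
     * yields zero in every byte, i.e. P is consistent with the data. */
    static bool parity_is_consistent(const uint8_t *parity,
                                     uint8_t *const *data, int ndisks,
                                     size_t len)
    {
            size_t b;
            int d;

            for (b = 0; b < len; b++) {
                    uint8_t acc = parity[b];

                    for (d = 0; d < ndisks; d++)
                            acc ^= data[d][b];
                    if (acc)
                            return false;
            }
            return true;
    }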
2541 int pd_idx, qd_idx; in raid5_compute_sector() local
2569 pd_idx = qd_idx = -1; in raid5_compute_sector()
2572 pd_idx = data_disks; in raid5_compute_sector()
2577 pd_idx = data_disks - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2578 if (*dd_idx >= pd_idx) in raid5_compute_sector()
2582 pd_idx = sector_div(stripe2, raid_disks); in raid5_compute_sector()
2583 if (*dd_idx >= pd_idx) in raid5_compute_sector()
2587 pd_idx = data_disks - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2588 *dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks; in raid5_compute_sector()
2591 pd_idx = sector_div(stripe2, raid_disks); in raid5_compute_sector()
2592 *dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks; in raid5_compute_sector()
2595 pd_idx = 0; in raid5_compute_sector()
2599 pd_idx = data_disks; in raid5_compute_sector()
2609 pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2610 qd_idx = pd_idx + 1; in raid5_compute_sector()
2611 if (pd_idx == raid_disks-1) { in raid5_compute_sector()
2614 } else if (*dd_idx >= pd_idx) in raid5_compute_sector()
2618 pd_idx = sector_div(stripe2, raid_disks); in raid5_compute_sector()
2619 qd_idx = pd_idx + 1; in raid5_compute_sector()
2620 if (pd_idx == raid_disks-1) { in raid5_compute_sector()
2623 } else if (*dd_idx >= pd_idx) in raid5_compute_sector()
2627 pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2628 qd_idx = (pd_idx + 1) % raid_disks; in raid5_compute_sector()
2629 *dd_idx = (pd_idx + 2 + *dd_idx) % raid_disks; in raid5_compute_sector()
2632 pd_idx = sector_div(stripe2, raid_disks); in raid5_compute_sector()
2633 qd_idx = (pd_idx + 1) % raid_disks; in raid5_compute_sector()
2634 *dd_idx = (pd_idx + 2 + *dd_idx) % raid_disks; in raid5_compute_sector()
2638 pd_idx = 0; in raid5_compute_sector()
2643 pd_idx = data_disks; in raid5_compute_sector()
2651 pd_idx = sector_div(stripe2, raid_disks); in raid5_compute_sector()
2652 qd_idx = pd_idx + 1; in raid5_compute_sector()
2653 if (pd_idx == raid_disks-1) { in raid5_compute_sector()
2656 } else if (*dd_idx >= pd_idx) in raid5_compute_sector()
2667 pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2668 qd_idx = pd_idx + 1; in raid5_compute_sector()
2669 if (pd_idx == raid_disks-1) { in raid5_compute_sector()
2672 } else if (*dd_idx >= pd_idx) in raid5_compute_sector()
2679 pd_idx = raid_disks - 1 - sector_div(stripe2, raid_disks); in raid5_compute_sector()
2680 qd_idx = (pd_idx + raid_disks - 1) % raid_disks; in raid5_compute_sector()
2681 *dd_idx = (pd_idx + 1 + *dd_idx) % raid_disks; in raid5_compute_sector()
2687 pd_idx = data_disks - sector_div(stripe2, raid_disks-1); in raid5_compute_sector()
2688 if (*dd_idx >= pd_idx) in raid5_compute_sector()
2694 pd_idx = sector_div(stripe2, raid_disks-1); in raid5_compute_sector()
2695 if (*dd_idx >= pd_idx) in raid5_compute_sector()
2701 pd_idx = data_disks - sector_div(stripe2, raid_disks-1); in raid5_compute_sector()
2702 *dd_idx = (pd_idx + 1 + *dd_idx) % (raid_disks-1); in raid5_compute_sector()
2707 pd_idx = sector_div(stripe2, raid_disks-1); in raid5_compute_sector()
2708 *dd_idx = (pd_idx + 1 + *dd_idx) % (raid_disks-1); in raid5_compute_sector()
2713 pd_idx = 0; in raid5_compute_sector()
2725 sh->pd_idx = pd_idx; in raid5_compute_sector()
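The raid5_compute_sector() hits above are where parity rotation happens: each layout algorithm drops pd_idx (and qd_idx for the RAID6 cases) onto a different slot of each successive stripe and then shifts the logical data index around it; sector_div() in the listing divides the stripe number in place and hands back the remainder used for the rotation. A hedged sketch of two of the RAID5 cases visible above, left-asymmetric and left-symmetric, using a plain modulo in place of sector_div() (illustrative only; the kernel also handles the RAID6 and fixed-parity variants listed):

    /* Sketch: place parity and map a logical data index to a device slot
     * for one stripe, mirroring two of the switch cases shown above.
     *
     * raid_disks = total devices, data_disks = raid_disks - 1 for RAID5,
     * stripe     = which stripe the sector falls in,
     * dd_idx     = logical data index within the stripe. */
    struct layout_result {
            int pd_idx;     /* slot holding parity for this stripe */
            int dd_idx;     /* slot holding the requested data block */
    };

    static struct layout_result left_asymmetric(unsigned long stripe,
                                                int raid_disks, int dd_idx)
    {
            struct layout_result r;
            int data_disks = raid_disks - 1;

            r.pd_idx = data_disks - (int)(stripe % raid_disks);
            /* Data slots keep their order and simply skip the parity slot. */
            r.dd_idx = dd_idx + (dd_idx >= r.pd_idx ? 1 : 0);
            return r;
    }

    static struct layout_result left_symmetric(unsigned long stripe,
                                               int raid_disks, int dd_idx)
    {
            struct layout_result r;
            int data_disks = raid_disks - 1;

            r.pd_idx = data_disks - (int)(stripe % raid_disks);
            /* Data starts just after the parity slot and wraps, so both
             * parity and the first data block rotate across devices. */
            r.dd_idx = (r.pd_idx + 1 + dd_idx) % raid_disks;
            return r;
    }

For a four-disk left-symmetric array, stripe 0 puts parity on slot 3 with logical block 0 on slot 0, while stripe 1 moves parity to slot 2 and logical block 0 to slot 3, which is the rotation the assignments at lines 2587-2588 encode.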
2756 if (i == sh->pd_idx) in raid5_compute_blocknr()
2764 if (i > sh->pd_idx) in raid5_compute_blocknr()
2769 if (i < sh->pd_idx) in raid5_compute_blocknr()
2771 i -= (sh->pd_idx + 1); in raid5_compute_blocknr()
2790 if (sh->pd_idx == raid_disks-1) in raid5_compute_blocknr()
2792 else if (i > sh->pd_idx) in raid5_compute_blocknr()
2797 if (sh->pd_idx == raid_disks-1) in raid5_compute_blocknr()
2801 if (i < sh->pd_idx) in raid5_compute_blocknr()
2803 i -= (sh->pd_idx + 2); in raid5_compute_blocknr()
2813 if (sh->pd_idx == 0) in raid5_compute_blocknr()
2817 if (i < sh->pd_idx) in raid5_compute_blocknr()
2819 i -= (sh->pd_idx + 1); in raid5_compute_blocknr()
2824 if (i > sh->pd_idx) in raid5_compute_blocknr()
2829 if (i < sh->pd_idx) in raid5_compute_blocknr()
2831 i -= (sh->pd_idx + 1); in raid5_compute_blocknr()
2847 if (check != sh->sector || dummy1 != dd_idx || sh2.pd_idx != sh->pd_idx in raid5_compute_blocknr()
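raid5_compute_blocknr() inverts that mapping: given a device slot it recovers the logical data index by undoing the rotation around pd_idx, and the hit at line 2847 re-runs raid5_compute_sector() as a sanity check that the round trip reproduces the same sector and indices. A sketch of the inverse for the symmetric pattern visible at lines 2769-2771:

    /* Sketch: undo the symmetric rotation. Given the device slot i and
     * the stripe's pd_idx, recover the logical data index. */
    static int symmetric_slot_to_data(int i, int pd_idx, int raid_disks)
    {
            if (i < pd_idx)
                    i += raid_disks;        /* unwrap past the end */
            return i - (pd_idx + 1);        /* data starts after parity */
    }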
2860 int i, pd_idx = sh->pd_idx, qd_idx = sh->qd_idx, disks = sh->disks; in schedule_reconstruction() local
2896 BUG_ON(!(test_bit(R5_UPTODATE, &sh->dev[pd_idx].flags) || in schedule_reconstruction()
2897 test_bit(R5_Wantcompute, &sh->dev[pd_idx].flags))); in schedule_reconstruction()
2904 if (i == pd_idx || i == qd_idx) in schedule_reconstruction()
2928 set_bit(R5_LOCKED, &sh->dev[pd_idx].flags); in schedule_reconstruction()
2929 clear_bit(R5_UPTODATE, &sh->dev[pd_idx].flags); in schedule_reconstruction()
3200 if (test_and_clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags)) in handle_failed_sync()
3341 if (s->failed_num[i] != sh->pd_idx && in need_this_block()
3514 test_bit(R5_Discard, &sh->dev[sh->pd_idx].flags)) { in handle_stripe_clean_event()
3516 clear_bit(R5_Discard, &sh->dev[sh->pd_idx].flags); in handle_stripe_clean_event()
3517 clear_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags); in handle_stripe_clean_event()
3583 if ((dev->towrite || i == sh->pd_idx || i == sh->qd_idx) && in handle_stripe_dirtying()
3594 i != sh->pd_idx && i != sh->qd_idx && in handle_stripe_dirtying()
3615 if ((dev->towrite || i == sh->pd_idx || i == sh->qd_idx) && in handle_stripe_dirtying()
3641 i != sh->pd_idx && i != sh->qd_idx && in handle_stripe_dirtying()
3702 clear_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags); in handle_parity_checks5()
3711 dev = &sh->dev[sh->pd_idx]; in handle_parity_checks5()
3758 &sh->dev[sh->pd_idx].flags); in handle_parity_checks5()
3759 sh->ops.target = sh->pd_idx; in handle_parity_checks5()
3779 int pd_idx = sh->pd_idx; in handle_parity_checks6() local
3819 clear_bit(R5_UPTODATE, &sh->dev[pd_idx].flags); in handle_parity_checks6()
3858 dev = &sh->dev[pd_idx]; in handle_parity_checks6()
3915 &sh->dev[pd_idx].flags); in handle_parity_checks6()
3916 *target = pd_idx; in handle_parity_checks6()
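Both handle_parity_checks5() and handle_parity_checks6() resolve a failed P check the same way: clear R5_UPTODATE on the pd_idx device and point the compute machinery at pd_idx (sh->ops.target / *target above) so the next pass regenerates parity from the data blocks. A coarse, hedged sketch of that decision; the enum and helper are illustrative stand-ins, not the kernel's stripe state machine:

    /* Sketch: act on the zero-sum result of a parity check.
     * zero_sum != 0 means the XOR of data and P did not cancel out. */
    enum check_action { PARITY_OK, RECOMPUTE_PARITY };

    static enum check_action resolve_parity_check(int zero_sum, int pd_idx,
                                                  int *compute_target)
    {
            if (!zero_sum)
                    return PARITY_OK;       /* data and P agree */

            /* Mismatch: schedule a recompute with parity as the target. */
            *compute_target = pd_idx;
            return RECOMPUTE_PARITY;
    }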
3950 if (i != sh->pd_idx && i != sh->qd_idx) { in handle_stripe_expansion()
3981 if (j != sh2->pd_idx && in handle_stripe_expansion()
4332 atomic_read(&sh->count), sh->pd_idx, sh->qd_idx, in handle_stripe()
4391 BUG_ON(!test_bit(R5_UPTODATE, &sh->dev[sh->pd_idx].flags) && in handle_stripe()
4392 !test_bit(R5_Discard, &sh->dev[sh->pd_idx].flags)); in handle_stripe()
4399 (i == sh->pd_idx || i == sh->qd_idx || in handle_stripe()
4408 ((i == sh->pd_idx || i == sh->qd_idx) && in handle_stripe()
4421 pdev = &sh->dev[sh->pd_idx]; in handle_stripe()
4422 s.p_failed = (s.failed >= 1 && s.failed_num[0] == sh->pd_idx) in handle_stripe()
4423 || (s.failed >= 2 && s.failed_num[1] == sh->pd_idx); in handle_stripe()
4495 if (test_and_clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags)) in handle_stripe()
5086 set_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags); in make_discard_request()
5092 clear_bit(R5_Overlap, &sh->dev[sh->pd_idx].flags); in make_discard_request()
5095 if (d == sh->pd_idx || d == sh->qd_idx) in make_discard_request()
5109 if (d == sh->pd_idx || d == sh->qd_idx) in make_discard_request()
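make_discard_request() tests d == sh->pd_idx || d == sh->qd_idx in both of its per-device loops (lines 5095 and 5109), skipping the parity slots because they hold no user data to discard. A minimal sketch of that filter:

    /* Sketch: count the slots of a stripe that actually receive a
     * discard, i.e. everything except the P (and, on RAID6, Q) slots. */
    static int discard_data_slots(int disks, int pd_idx, int qd_idx)
    {
            int d, count = 0;

            for (d = 0; d < disks; d++) {
                    if (d == pd_idx || d == qd_idx)
                            continue;       /* parity: nothing to discard */
                    count++;
            }
            return count;
    }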
5493 if (j == sh->pd_idx) in reshape_request()