xor_srcs  789 drivers/dma/ioat/init.c   struct page *xor_srcs[IOAT_NUM_SRC_TEST];
xor_srcs  812 drivers/dma/ioat/init.c   xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
xor_srcs  813 drivers/dma/ioat/init.c   if (!xor_srcs[src_idx]) {
xor_srcs  815 drivers/dma/ioat/init.c   __free_page(xor_srcs[src_idx]);
xor_srcs  823 drivers/dma/ioat/init.c   __free_page(xor_srcs[src_idx]);
xor_srcs  829 drivers/dma/ioat/init.c   u8 *ptr = page_address(xor_srcs[src_idx]);
xor_srcs  860 drivers/dma/ioat/init.c   dma_srcs[i] = dma_map_page(dev, xor_srcs[i], 0, PAGE_SIZE,
xor_srcs  923 drivers/dma/ioat/init.c   xor_val_srcs[i] = xor_srcs[i];
xor_srcs 1045 drivers/dma/ioat/init.c   __free_page(xor_srcs[src_idx]);
xor_srcs  910 drivers/dma/iop-adma.c    struct page *xor_srcs[IOP_ADMA_NUM_SRC_TEST];
xor_srcs  926 drivers/dma/iop-adma.c    xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
xor_srcs  927 drivers/dma/iop-adma.c    if (!xor_srcs[src_idx]) {
xor_srcs  929 drivers/dma/iop-adma.c    __free_page(xor_srcs[src_idx]);
xor_srcs  937 drivers/dma/iop-adma.c    __free_page(xor_srcs[src_idx]);
xor_srcs  943 drivers/dma/iop-adma.c    u8 *ptr = page_address(xor_srcs[src_idx]);
xor_srcs  968 drivers/dma/iop-adma.c    dma_srcs[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
xor_srcs 1007 drivers/dma/iop-adma.c    zero_sum_srcs[i] = xor_srcs[i];
xor_srcs 1073 drivers/dma/iop-adma.c    __free_page(xor_srcs[src_idx]);
xor_srcs  876 drivers/dma/mv_xor.c      struct page *xor_srcs[MV_XOR_NUM_SRC_TEST];
xor_srcs  889 drivers/dma/mv_xor.c      xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
xor_srcs  890 drivers/dma/mv_xor.c      if (!xor_srcs[src_idx]) {
xor_srcs  892 drivers/dma/mv_xor.c      __free_page(xor_srcs[src_idx]);
xor_srcs  900 drivers/dma/mv_xor.c      __free_page(xor_srcs[src_idx]);
xor_srcs  906 drivers/dma/mv_xor.c      u8 *ptr = page_address(xor_srcs[src_idx]);
xor_srcs  934 drivers/dma/mv_xor.c      unmap->addr[i] = dma_map_page(dma_chan->device->dev, xor_srcs[i],
xor_srcs 1004 drivers/dma/mv_xor.c      __free_page(xor_srcs[src_idx]);
xor_srcs  720 drivers/md/raid5-ppl.c    struct page *xor_srcs[] = { page1, page2 };
xor_srcs  724 drivers/md/raid5-ppl.c    tx = async_xor(page1, xor_srcs, 0, 2, size, &submit);
xor_srcs 1408 drivers/md/raid5.c        struct page **xor_srcs = to_addr_page(percpu, 0);
xor_srcs 1425 drivers/md/raid5.c        xor_srcs[count++] = sh->dev[i].page;
xor_srcs 1432 drivers/md/raid5.c        tx = async_memcpy(xor_dest, xor_srcs[0], 0, 0, STRIPE_SIZE, &submit);
xor_srcs 1434 drivers/md/raid5.c        tx = async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit);
xor_srcs 1672 drivers/md/raid5.c        struct page **xor_srcs = to_addr_page(percpu, 0);
xor_srcs 1677 drivers/md/raid5.c        struct page *xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;
xor_srcs 1687 drivers/md/raid5.c        xor_srcs[count++] = dev->orig_page;
xor_srcs 1689 drivers/md/raid5.c        xor_srcs[count++] = dev->page;
xor_srcs 1694 drivers/md/raid5.c        tx = async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit);
xor_srcs 1842 drivers/md/raid5.c        struct page **xor_srcs;
xor_srcs 1869 drivers/md/raid5.c        xor_srcs = to_addr_page(percpu, j);
xor_srcs 1875 drivers/md/raid5.c        xor_dest = xor_srcs[count++] = sh->dev[pd_idx].page;
xor_srcs 1880 drivers/md/raid5.c        xor_srcs[count++] = dev->page;
xor_srcs 1887 drivers/md/raid5.c        xor_srcs[count++] = dev->page;
xor_srcs 1913 drivers/md/raid5.c        tx = async_memcpy(xor_dest, xor_srcs[0], 0, 0, STRIPE_SIZE, &submit);
xor_srcs 1915 drivers/md/raid5.c        tx = async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit);
xor_srcs 2002 drivers/md/raid5.c        struct page **xor_srcs = to_addr_page(percpu, 0);
xor_srcs 2014 drivers/md/raid5.c        xor_srcs[count++] = xor_dest;
xor_srcs 2018 drivers/md/raid5.c        xor_srcs[count++] = sh->dev[i].page;
xor_srcs 2023 drivers/md/raid5.c        tx = async_xor_val(xor_dest, xor_srcs, 0, count, STRIPE_SIZE,
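
The hits above fall into two patterns: the dmaengine drivers (drivers/dma/ioat/init.c, drivers/dma/iop-adma.c, drivers/dma/mv_xor.c) build a small array of test pages for their XOR self-test, while drivers/md/raid5.c and raid5-ppl.c collect stripe pages into xor_srcs and hand them to async_xor(), async_xor_val(), or async_memcpy(). The sketch below condenses the shared self-test setup visible in the snippets (allocate the source pages, unwind on allocation failure, fill each page with a recognizable pattern). NUM_SRC_TEST, xor_selftest_alloc_srcs(), and the exact fill pattern are illustrative stand-ins rather than names from the tree; each driver open-codes this logic with its own constant (IOAT_NUM_SRC_TEST, IOP_ADMA_NUM_SRC_TEST, MV_XOR_NUM_SRC_TEST) inside its self-test function.

/*
 * Minimal sketch of the source-page setup shared by the ioat/iop-adma/mv_xor
 * XOR self-tests, based on the snippets listed above. NUM_SRC_TEST and
 * xor_selftest_alloc_srcs() are hypothetical names used for illustration.
 */
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/mm.h>
#include <linux/string.h>
#include <linux/types.h>

#define NUM_SRC_TEST 6	/* hypothetical; each driver picks its own value */

static int xor_selftest_alloc_srcs(struct page **xor_srcs)
{
	int src_idx;

	/* allocate one page per XOR source */
	for (src_idx = 0; src_idx < NUM_SRC_TEST; src_idx++) {
		xor_srcs[src_idx] = alloc_page(GFP_KERNEL);
		if (!xor_srcs[src_idx]) {
			/* unwind the pages allocated so far */
			while (--src_idx >= 0)
				__free_page(xor_srcs[src_idx]);
			return -ENOMEM;
		}
	}

	/* fill each source with a distinct pattern so the XOR result is predictable */
	for (src_idx = 0; src_idx < NUM_SRC_TEST; src_idx++) {
		u8 *ptr = page_address(xor_srcs[src_idx]);

		memset(ptr, 1 << src_idx, PAGE_SIZE);
	}

	return 0;
}

Once the pages are set up, the drivers map them with dma_map_page() and submit an XOR descriptor to the channel under test, whereas raid5.c passes the same kind of page array straight to the async_tx layer, e.g. async_xor(xor_dest, xor_srcs, 0, count, STRIPE_SIZE, &submit) as in the raid5.c hits at lines 1434, 1694, and 1915.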