device_prep_dma_xor    65  crypto/async_tx/async_xor.c    tx = dma->device_prep_dma_xor(chan, dma_dest, src_list,
device_prep_dma_xor    75  crypto/async_tx/async_xor.c    tx = dma->device_prep_dma_xor(chan, dma_dest,
device_prep_dma_xor  1590  drivers/dma/bcm-sba-raid.c     dma_dev->device_prep_dma_xor = sba_prep_dma_xor;
device_prep_dma_xor   931  drivers/dma/dmaengine.c        if (dma_has_cap(DMA_XOR, device->cap_mask) && !device->device_prep_dma_xor) {
device_prep_dma_xor   771  drivers/dma/dmatest.c          tx = dev->device_prep_dma_xor(chan,
device_prep_dma_xor   793  drivers/dma/fsl_raid.c         dma_dev->device_prep_dma_xor = fsl_re_prep_dma_xor;
device_prep_dma_xor   867  drivers/dma/ioat/init.c        tx = dma->device_prep_dma_xor(dma_chan, dest_dma, dma_srcs,
device_prep_dma_xor  1122  drivers/dma/ioat/init.c        dma->device_prep_dma_xor = ioat_prep_xor;
device_prep_dma_xor  1141  drivers/dma/ioat/init.c        dma->device_prep_dma_xor = ioat_prep_pqxor;
device_prep_dma_xor  1326  drivers/dma/iop-adma.c         dma_dev->device_prep_dma_xor = iop_adma_prep_dma_xor;
device_prep_dma_xor  1095  drivers/dma/mv_xor.c           dma_dev->device_prep_dma_xor = mv_xor_prep_dma_xor;
device_prep_dma_xor   847  drivers/dma/mv_xor_v2.c        dma_dev->device_prep_dma_xor = mv_xor_v2_prep_dma_xor;
device_prep_dma_xor  3804  drivers/dma/ppc4xx/adma.c      adev->common.device_prep_dma_xor =
device_prep_dma_xor  1516  drivers/dma/xgene-dma.c        dma_dev->device_prep_dma_xor = xgene_dma_prep_xor;
device_prep_dma_xor   756  include/linux/dmaengine.h      struct dma_async_tx_descriptor *(*device_prep_dma_xor)(
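The listing covers both sides of the dmaengine contract: include/linux/dmaengine.h declares the device_prep_dma_xor callback in struct dma_device, XOR-capable providers (bcm-sba-raid, fsl_raid, ioat, iop-adma, mv_xor, mv_xor_v2, ppc4xx/adma, xgene-dma) assign their own implementation to it, dmaengine.c refuses to register a device that claims DMA_XOR without supplying the callback, and clients such as async_xor.c and dmatest.c call it to obtain an XOR transaction descriptor. Below is a minimal sketch of that provider pattern, not code from the tree: foo_prep_dma_xor, foo_register and the max_xor value are hypothetical, while the callback signature is the one declared in include/linux/dmaengine.h.

/*
 * Sketch only: foo_* names and the max_xor value are hypothetical,
 * illustrating the standard registration flow seen in the drivers above.
 */
#include <linux/dmaengine.h>

static struct dma_async_tx_descriptor *
foo_prep_dma_xor(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src,
		 unsigned int src_cnt, size_t len, unsigned long flags)
{
	/*
	 * A real driver builds a hardware descriptor here that XORs
	 * src[0..src_cnt-1] into dst over len bytes and returns the
	 * embedded dma_async_tx_descriptor (NULL if out of descriptors).
	 */
	return NULL;
}

static int foo_register(struct dma_device *dma_dev)
{
	/*
	 * Advertise the capability and hook up the callback, as the
	 * in-tree drivers do; the check at drivers/dma/dmaengine.c:931
	 * rejects a device that sets DMA_XOR but leaves
	 * device_prep_dma_xor NULL.
	 */
	dma_cap_set(DMA_XOR, dma_dev->cap_mask);
	dma_dev->device_prep_dma_xor = foo_prep_dma_xor;
	dma_dev->max_xor = 8;	/* hypothetical source-operand limit */

	return dma_async_device_register(dma_dev);
}

On the client side (compare crypto/async_tx/async_xor.c:65 and drivers/dma/dmatest.c:771), the callback returns a descriptor that is then submitted with dmaengine_submit(tx) and kicked off with dma_async_issue_pending(chan).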