Lines Matching refs:src_cnt in crypto/async_tx/async_xor.c

45 int src_cnt = unmap->to_cnt; in do_async_xor() local
50 while (src_cnt) { in do_async_xor()
54 xor_src_cnt = min(src_cnt, (int)dma->max_xor); in do_async_xor()
58 if (src_cnt > xor_src_cnt) { in do_async_xor()
99 if (src_cnt > xor_src_cnt) { in do_async_xor()
101 src_cnt -= xor_src_cnt; in do_async_xor()
103 src_cnt++; in do_async_xor()
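
The do_async_xor() references above show the chunking bookkeeping: when src_cnt exceeds the engine's limit, each pass consumes xor_src_cnt sources and the pass's destination is counted back in as one extra source for the next pass (src_cnt -= xor_src_cnt; src_cnt++). A minimal userspace sketch of that arithmetic, with max_xor standing in for dma->max_xor (an illustration, not the kernel code):

    #include <stdio.h>

    static int min_int(int a, int b) { return a < b ? a : b; }

    int main(void)
    {
        int src_cnt = 10;   /* total number of source buffers */
        int max_xor = 4;    /* per-operation limit, like dma->max_xor */
        int pass = 0;

        while (src_cnt) {
            int xor_src_cnt = min_int(src_cnt, max_xor);

            printf("pass %d: xor %d sources into dest\n", ++pass, xor_src_cnt);

            if (src_cnt > xor_src_cnt) {
                /* drop the sources consumed by this pass ... */
                src_cnt -= xor_src_cnt;
                /* ... and count the intermediate result as a new source */
                src_cnt++;
            } else {
                break;  /* final pass covered all remaining sources */
            }
        }
        return 0;
    }

With 10 sources and a limit of 4 this prints three passes, the second and third each picking up the previous pass's result as an extra source.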
114 int src_cnt, size_t len, struct async_submit_ctl *submit) in do_sync_xor() argument
128 for (i = 0; i < src_cnt; i++) in do_sync_xor()
131 src_cnt = xor_src_cnt; in do_sync_xor()
138 while (src_cnt > 0) { in do_sync_xor()
140 xor_src_cnt = min(src_cnt, MAX_XOR_BLOCKS); in do_sync_xor()
144 src_cnt -= xor_src_cnt; in do_sync_xor()
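
The do_sync_xor() references show the CPU fallback walking the source list in batches of at most MAX_XOR_BLOCKS. Below is a self-contained userspace sketch of that batching; xor_batch() is an illustrative stand-in for the kernel's optimized XOR helper, and the buffers in main() are invented for the example:

    #include <stddef.h>
    #include <stdio.h>

    #define MAX_XOR_BLOCKS 8   /* batch limit, mirrors the kernel constant */

    static void xor_batch(int count, size_t len, unsigned char *dest,
                          unsigned char **srcs)
    {
        for (int i = 0; i < count; i++)
            for (size_t b = 0; b < len; b++)
                dest[b] ^= srcs[i][b];
    }

    static void sync_xor(unsigned char *dest, unsigned char **src_list,
                         int src_cnt, size_t len)
    {
        while (src_cnt > 0) {
            int xor_src_cnt = src_cnt < MAX_XOR_BLOCKS ? src_cnt : MAX_XOR_BLOCKS;

            xor_batch(xor_src_cnt, len, dest, src_list);
            src_list += xor_src_cnt;
            src_cnt -= xor_src_cnt;
        }
    }

    int main(void)
    {
        unsigned char a[4] = {1, 2, 3, 4}, b[4] = {4, 3, 2, 1}, d[4] = {0};
        unsigned char *srcs[2] = { a, b };

        sync_xor(d, srcs, 2, sizeof(d));
        printf("%x %x %x %x\n", d[0], d[1], d[2], d[3]);  /* prints: 5 1 1 5 */
        return 0;
    }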
174 int src_cnt, size_t len, struct async_submit_ctl *submit) in async_xor() argument
178 src_cnt, len); in async_xor()
182 BUG_ON(src_cnt <= 1); in async_xor()
185 unmap = dmaengine_get_unmap_data(device->dev, src_cnt+1, GFP_NOWAIT); in async_xor()
195 for (i = 0, j = 0; i < src_cnt; i++) { in async_xor()
222 src_cnt--; in async_xor()
229 do_sync_xor(dest, src_list, offset, src_cnt, len, submit); in async_xor()
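
In async_xor(), the src_cnt-- just before the do_sync_xor() call reflects the convention that a caller may list the destination as the first source; the software path XORs into the destination in place, so that entry is dropped before the fallback runs. A small sketch of the adjustment, with drop_dst standing in for the ASYNC_TX_XOR_DROP_DST submit flag and the buffers invented for the example:

    #include <stdio.h>
    #include <stddef.h>

    static void xor_into(unsigned char *dest, unsigned char **srcs,
                         int src_cnt, size_t len)
    {
        for (int i = 0; i < src_cnt; i++)
            for (size_t b = 0; b < len; b++)
                dest[b] ^= srcs[i][b];
    }

    int main(void)
    {
        unsigned char dest[4] = {0xff, 0x00, 0xff, 0x00};
        unsigned char s1[4]   = {0x0f, 0x0f, 0x0f, 0x0f};
        /* caller convention: destination listed first among the sources */
        unsigned char *src_list[2] = { dest, s1 };
        unsigned char **srcs = src_list;
        int src_cnt = 2;
        int drop_dst = 1;   /* stands in for ASYNC_TX_XOR_DROP_DST */

        if (drop_dst) {
            /* the software path XORs into dest in place, so skip it */
            src_cnt--;
            srcs++;
        }
        xor_into(dest, srcs, src_cnt, sizeof(dest));

        printf("%02x %02x %02x %02x\n", dest[0], dest[1], dest[2], dest[3]);
        return 0;
    }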
243 struct page **src_list, int src_cnt, size_t len) in xor_val_chan() argument
249 src_cnt, len); in xor_val_chan()
270 int src_cnt, size_t len, enum sum_check_flags *result, in async_xor_val() argument
273 struct dma_chan *chan = xor_val_chan(submit, dest, src_list, src_cnt, len); in async_xor_val()
278 BUG_ON(src_cnt <= 1); in async_xor_val()
281 unmap = dmaengine_get_unmap_data(device->dev, src_cnt, GFP_NOWAIT); in async_xor_val()
283 if (unmap && src_cnt <= device->max_xor && in async_xor_val()
295 for (i = 0; i < src_cnt; i++) { in async_xor_val()
302 tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt, in async_xor_val()
311 unmap->addr, src_cnt, len, result, in async_xor_val()
321 WARN_ONCE(device && src_cnt <= device->max_xor, in async_xor_val()
328 tx = async_xor(dest, src_list, offset, src_cnt, len, submit); in async_xor_val()
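
When no channel can perform the hardware xor_val, async_xor_val() falls back to recomputing the XOR via async_xor() (last line above) and reports through the sum_check_flags result whether the sum came out zero. A self-contained sketch of that validation idea; SUM_CHECK_P_RESULT is used here as an illustrative flag bit and the data buffers are invented for the example:

    #include <stdio.h>
    #include <stddef.h>

    #define SUM_CHECK_P_RESULT (1 << 0)   /* "parity bad" bit, illustrative */

    static int xor_val(unsigned char **src_list, int src_cnt, size_t len)
    {
        unsigned char scratch[64] = {0};
        unsigned char nonzero = 0;

        for (int i = 0; i < src_cnt; i++)
            for (size_t b = 0; b < len; b++)
                scratch[b] ^= src_list[i][b];

        for (size_t b = 0; b < len; b++)
            nonzero |= scratch[b];

        return nonzero ? SUM_CHECK_P_RESULT : 0;
    }

    int main(void)
    {
        unsigned char d0[8] = {1, 2, 3, 4, 5, 6, 7, 8};
        unsigned char d1[8] = {8, 7, 6, 5, 4, 3, 2, 1};
        unsigned char p[8];
        unsigned char *srcs[3] = { d0, d1, p };

        /* build correct parity, then confirm d0 ^ d1 ^ p sums to zero */
        for (size_t b = 0; b < sizeof(p); b++)
            p[b] = d0[b] ^ d1[b];

        printf("parity check: %s\n",
               xor_val(srcs, 3, sizeof(p)) ? "FAILED" : "ok");
        return 0;
    }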