Searched refs:src_cnt (Results 1 - 18 of 18) sorted by relevance

/linux-4.1.27/crypto/async_tx/
async_xor.c
45 int src_cnt = unmap->to_cnt; do_async_xor() local
50 while (src_cnt) { do_async_xor()
54 xor_src_cnt = min(src_cnt, (int)dma->max_xor); do_async_xor()
58 if (src_cnt > xor_src_cnt) { do_async_xor()
99 if (src_cnt > xor_src_cnt) { do_async_xor()
101 src_cnt -= xor_src_cnt; do_async_xor()
103 src_cnt++; do_async_xor()
114 int src_cnt, size_t len, struct async_submit_ctl *submit) do_sync_xor()
128 for (i = 0; i < src_cnt; i++) do_sync_xor()
131 src_cnt = xor_src_cnt; do_sync_xor()
138 while (src_cnt > 0) { do_sync_xor()
140 xor_src_cnt = min(src_cnt, MAX_XOR_BLOCKS); do_sync_xor()
144 src_cnt -= xor_src_cnt; do_sync_xor()
156 * @src_cnt: number of source pages
174 int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor()
178 src_cnt, len); async_xor()
182 BUG_ON(src_cnt <= 1); async_xor()
185 unmap = dmaengine_get_unmap_data(device->dev, src_cnt+1, GFP_NOIO); async_xor()
195 for (i = 0, j = 0; i < src_cnt; i++) { async_xor()
222 src_cnt--; async_xor()
229 do_sync_xor(dest, src_list, offset, src_cnt, len, submit); async_xor()
243 struct page **src_list, int src_cnt, size_t len) xor_val_chan()
249 src_cnt, len); xor_val_chan()
257 * @src_cnt: number of source pages
270 int src_cnt, size_t len, enum sum_check_flags *result, async_xor_val()
273 struct dma_chan *chan = xor_val_chan(submit, dest, src_list, src_cnt, len); async_xor_val()
278 BUG_ON(src_cnt <= 1); async_xor_val()
281 unmap = dmaengine_get_unmap_data(device->dev, src_cnt, GFP_NOIO); async_xor_val()
283 if (unmap && src_cnt <= device->max_xor && async_xor_val()
295 for (i = 0; i < src_cnt; i++) { async_xor_val()
302 tx = device->device_prep_dma_xor_val(chan, unmap->addr, src_cnt, async_xor_val()
311 unmap->addr, src_cnt, len, result, async_xor_val()
321 WARN_ONCE(device && src_cnt <= device->max_xor, async_xor_val()
328 tx = async_xor(dest, src_list, offset, src_cnt, len, submit); async_xor_val()
113 do_sync_xor(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, struct async_submit_ctl *submit) do_sync_xor() argument
173 async_xor(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, struct async_submit_ctl *submit) async_xor() argument
242 xor_val_chan(struct async_submit_ctl *submit, struct page *dest, struct page **src_list, int src_cnt, size_t len) xor_val_chan() argument
269 async_xor_val(struct page *dest, struct page **src_list, unsigned int offset, int src_cnt, size_t len, enum sum_check_flags *result, struct async_submit_ctl *submit) async_xor_val() argument
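Note on the do_async_xor()/do_sync_xor() hits above: both paths walk the source list in slices when src_cnt exceeds what one operation accepts (dma->max_xor for the engine at line 54, MAX_XOR_BLOCKS for the software path at line 140), and the hits at lines 99-103 suggest the intermediate destination is folded back in as an extra source for the follow-on slice. The standalone sketch below illustrates only that slicing arithmetic; MAX_XOR and xor_one_chunk() are placeholders, not names from the file.

    #include <stdio.h>

    #define MAX_XOR 5    /* assumed per-operation limit, standing in for dma->max_xor */

    /* Placeholder for one XOR operation over 'count' sources starting at 'first'. */
    static void xor_one_chunk(int first, int count)
    {
        printf("xor sources %d..%d into dest\n", first, first + count - 1);
    }

    int main(void)
    {
        int src_cnt = 12;    /* total number of source blocks */
        int src_off = 0;

        while (src_cnt > 0) {
            /* never ask for more sources than one operation supports */
            int xor_src_cnt = src_cnt < MAX_XOR ? src_cnt : MAX_XOR;

            xor_one_chunk(src_off, xor_src_cnt);

            src_cnt -= xor_src_cnt;
            src_off += xor_src_cnt;
        }
        return 0;
    }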
async_pq.c
60 int src_cnt = disks - 2; do_async_gen_syndrome() local
68 while (src_cnt > 0) { do_async_gen_syndrome()
70 pq_src_cnt = min(src_cnt, dma_maxpq(dma, dma_flags)); do_async_gen_syndrome()
75 if (src_cnt > pq_src_cnt) { do_async_gen_syndrome()
109 src_cnt -= pq_src_cnt; do_async_gen_syndrome()
181 int src_cnt = disks - 2; async_gen_syndrome() local
184 blocks, src_cnt, len); async_gen_syndrome()
195 (src_cnt <= dma_maxpq(device, 0) || async_gen_syndrome()
200 unsigned char coefs[src_cnt]; async_gen_syndrome()
211 for (i = 0, j = 0; i < src_cnt; i++) { async_gen_syndrome()
316 int i, j = 0, src_cnt = 0; async_syndrome_val() local
329 src_cnt++; async_syndrome_val()
359 src_cnt, async_syndrome_val()
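The async_gen_syndrome() hits apply the same slicing idea to RAID-6 P/Q generation, with src_cnt = disks - 2 (the data blocks; the last two entries are the P and Q destinations) and a per-source multiplier array declared as unsigned char coefs[src_cnt] at line 200. A hedged sketch of that layout; the disk count and coefficient values are illustrative only (the real coefficients come from the raid6 GF(256) tables).

    #include <stdio.h>

    int main(void)
    {
        int disks = 8;                  /* illustrative RAID-6 member count */
        int src_cnt = disks - 2;        /* data blocks; indexes disks-2 and disks-1 hold P and Q */
        unsigned char coefs[src_cnt];   /* one multiplier per source, as in the hit at line 200 */

        for (int i = 0; i < src_cnt; i++)
            coefs[i] = (unsigned char)(1u << i);   /* placeholder values, not the raid6 gf tables */

        printf("%d data sources, P at index %d, Q at index %d, first coef %u\n",
               src_cnt, disks - 2, disks - 1, coefs[0]);
        return 0;
    }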
/linux-4.1.27/arch/powerpc/include/asm/
async_tx.h
30 int src_cnt, size_t src_sz);
33 src_cnt, src_sz) \
35 src_cnt, src_sz)
/linux-4.1.27/drivers/dma/ioat/
dma_v3.c
294 if (src_cnt_to_sw(xor->ctl_f.src_cnt) > 5) desc_has_ext()
300 if (src_cnt_to_sw(pq->ctl_f.src_cnt) > 3) desc_has_ext()
663 dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, __ioat3_prep_xor_lock()
678 BUG_ON(src_cnt < 2); __ioat3_prep_xor_lock()
684 if (src_cnt > 5) { __ioat3_prep_xor_lock()
717 for (s = 0; s < src_cnt; s++) __ioat3_prep_xor_lock()
723 xor->ctl_f.src_cnt = src_cnt_to_hw(src_cnt); __ioat3_prep_xor_lock()
754 unsigned int src_cnt, size_t len, unsigned long flags) ioat3_prep_xor()
756 return __ioat3_prep_xor_lock(chan, NULL, dest, src, src_cnt, len, flags); ioat3_prep_xor()
761 unsigned int src_cnt, size_t len, ioat3_prep_xor_val()
770 src_cnt - 1, len, flags); ioat3_prep_xor_val()
780 int src_cnt = src_cnt_to_sw(pq->ctl_f.src_cnt); dump_pq_desc_dbg() local
785 " src_cnt: %d)\n", dump_pq_desc_dbg()
791 pq->ctl_f.src_cnt); dump_pq_desc_dbg()
792 for (i = 0; i < src_cnt; i++) dump_pq_desc_dbg()
808 int src_cnt = src16_cnt_to_sw(pq->ctl_f.src_cnt); dump_pq16_desc_dbg() local
818 " src_cnt: %d)\n", dump_pq16_desc_dbg()
825 pq->ctl_f.src_cnt); dump_pq16_desc_dbg()
826 for (i = 0; i < src_cnt; i++) { dump_pq16_desc_dbg()
838 unsigned int src_cnt, const unsigned char *scf, __ioat3_prep_pq_lock()
860 BUG_ON(src_cnt + dmaf_continue(flags) < 2); __ioat3_prep_pq_lock()
867 if (src_cnt + dmaf_p_disabled_continue(flags) > 3 || __ioat3_prep_pq_lock()
902 for (s = 0; s < src_cnt; s++) __ioat3_prep_pq_lock()
921 pq->ctl_f.src_cnt = src_cnt_to_hw(s); __ioat3_prep_pq_lock()
962 unsigned int src_cnt, const unsigned char *scf, __ioat3_prep_pq16_lock()
1002 desc->sed = ioat3_alloc_sed(device, (src_cnt-2) >> 3); __ioat3_prep_pq16_lock()
1015 for (s = 0; s < src_cnt; s++) __ioat3_prep_pq16_lock()
1032 pq->ctl_f.src_cnt = src16_cnt_to_hw(s); __ioat3_prep_pq16_lock()
1060 static int src_cnt_flags(unsigned int src_cnt, unsigned long flags) src_cnt_flags() argument
1063 return src_cnt + 1; src_cnt_flags()
1065 return src_cnt + 3; src_cnt_flags()
1067 return src_cnt; src_cnt_flags()
1072 unsigned int src_cnt, const unsigned char *scf, size_t len, ioat3_prep_pq()
1084 if ((flags & DMA_PREP_PQ_DISABLE_P) && src_cnt == 1) { ioat3_prep_pq()
1094 return src_cnt_flags(src_cnt, flags) > 8 ? ioat3_prep_pq()
1102 return src_cnt_flags(src_cnt, flags) > 8 ? ioat3_prep_pq()
1103 __ioat3_prep_pq16_lock(chan, NULL, dst, src, src_cnt, ioat3_prep_pq()
1105 __ioat3_prep_pq_lock(chan, NULL, dst, src, src_cnt, ioat3_prep_pq()
1112 unsigned int src_cnt, const unsigned char *scf, size_t len, ioat3_prep_pq_val()
1126 return src_cnt_flags(src_cnt, flags) > 8 ? ioat3_prep_pq_val()
1127 __ioat3_prep_pq16_lock(chan, pqres, pq, src, src_cnt, scf, len, ioat3_prep_pq_val()
1129 __ioat3_prep_pq_lock(chan, pqres, pq, src, src_cnt, scf, len, ioat3_prep_pq_val()
1135 unsigned int src_cnt, size_t len, unsigned long flags) ioat3_prep_pqxor()
1137 unsigned char scf[src_cnt]; ioat3_prep_pqxor()
1140 memset(scf, 0, src_cnt); ioat3_prep_pqxor()
1145 return src_cnt_flags(src_cnt, flags) > 8 ? ioat3_prep_pqxor()
1146 __ioat3_prep_pq16_lock(chan, NULL, pq, src, src_cnt, scf, len, ioat3_prep_pqxor()
1148 __ioat3_prep_pq_lock(chan, NULL, pq, src, src_cnt, scf, len, ioat3_prep_pqxor()
1154 unsigned int src_cnt, size_t len, ioat3_prep_pqxor_val()
1157 unsigned char scf[src_cnt]; ioat3_prep_pqxor_val()
1165 memset(scf, 0, src_cnt); ioat3_prep_pqxor_val()
1170 return src_cnt_flags(src_cnt, flags) > 8 ? ioat3_prep_pqxor_val()
1171 __ioat3_prep_pq16_lock(chan, result, pq, &src[1], src_cnt - 1, ioat3_prep_pqxor_val()
1173 __ioat3_prep_pq_lock(chan, result, pq, &src[1], src_cnt - 1, ioat3_prep_pqxor_val()
662 __ioat3_prep_xor_lock(struct dma_chan *c, enum sum_check_flags *result, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) __ioat3_prep_xor_lock() argument
753 ioat3_prep_xor(struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) ioat3_prep_xor() argument
760 ioat3_prep_xor_val(struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, size_t len, enum sum_check_flags *result, unsigned long flags) ioat3_prep_xor_val() argument
836 __ioat3_prep_pq_lock(struct dma_chan *c, enum sum_check_flags *result, const dma_addr_t *dst, const dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) __ioat3_prep_pq_lock() argument
960 __ioat3_prep_pq16_lock(struct dma_chan *c, enum sum_check_flags *result, const dma_addr_t *dst, const dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) __ioat3_prep_pq16_lock() argument
1071 ioat3_prep_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ioat3_prep_pq() argument
1111 ioat3_prep_pq_val(struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, enum sum_check_flags *pqres, unsigned long flags) ioat3_prep_pq_val() argument
1134 ioat3_prep_pqxor(struct dma_chan *chan, dma_addr_t dst, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) ioat3_prep_pqxor() argument
1153 ioat3_prep_pqxor_val(struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, size_t len, enum sum_check_flags *result, unsigned long flags) ioat3_prep_pqxor_val() argument
hw.h
114 unsigned int src_cnt:3; member in struct:ioat_xor_descriptor::__anon3857::__anon3858
161 unsigned int src_cnt:3; member in struct:ioat_pq_descriptor::__anon3861::__anon3862
210 unsigned int src_cnt:3; member in struct:ioat_pq_update_descriptor::__anon3864::__anon3865
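The hw.h hits show the source count living in a 3-bit descriptor field (src_cnt:3), which is why dma_v3.c converts through src_cnt_to_hw()/src_cnt_to_sw() before comparing against 5 or 3 (lines 294/300 above). The sketch below assumes a biased encoding of count - 2 so that 2..9 sources fit in the field; the bias is a guess, not something the hits state.

    #include <assert.h>
    #include <stdio.h>

    struct xor_desc_ctl {
        unsigned int src_cnt : 3;   /* 3-bit hardware field, as in the hw.h hits */
    };

    /* Assumed bias: the field stores (sources - 2). */
    static unsigned int src_cnt_to_hw(int src_cnt) { return src_cnt - 2; }
    static int src_cnt_to_sw(unsigned int hw)      { return hw + 2; }

    int main(void)
    {
        struct xor_desc_ctl ctl;

        ctl.src_cnt = src_cnt_to_hw(7);            /* seven sources -> field value 5 */
        assert(src_cnt_to_sw(ctl.src_cnt) == 7);
        printf("field=%u sources=%d\n", (unsigned)ctl.src_cnt, src_cnt_to_sw(ctl.src_cnt));
        return 0;
    }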
/linux-4.1.27/arch/arm/include/asm/hardware/
iop3xx-adma.h
192 iop_chan_pq_slot_count(size_t len, int src_cnt, int *slots_per_op) iop_chan_pq_slot_count() argument
199 iop_desc_init_pq(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_pq() argument
219 iop_chan_pq_zero_sum_slot_count(size_t len, int src_cnt, int *slots_per_op) iop_chan_pq_zero_sum_slot_count() argument
226 iop_desc_init_pq_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_pq_zero_sum() argument
323 static inline int iop3xx_aau_xor_slot_count(size_t len, int src_cnt, iop3xx_aau_xor_slot_count() argument
336 *slots_per_op = slot_count_table[src_cnt - 1]; iop3xx_aau_xor_slot_count()
355 static inline int iop_chan_xor_slot_count(size_t len, int src_cnt, iop_chan_xor_slot_count() argument
358 int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op); iop_chan_xor_slot_count()
377 static inline int iop_chan_zero_sum_slot_count(size_t len, int src_cnt, iop_chan_zero_sum_slot_count() argument
380 int slot_cnt = iop3xx_aau_xor_slot_count(len, src_cnt, slots_per_op); iop_chan_zero_sum_slot_count()
495 iop3xx_desc_init_xor(struct iop3xx_desc_aau *hw_desc, int src_cnt, iop3xx_desc_init_xor() argument
506 switch (src_cnt) { iop3xx_desc_init_xor()
511 for (i = 24; i < src_cnt; i++) { iop3xx_desc_init_xor()
516 src_cnt = 24; iop3xx_desc_init_xor()
525 for (i = 16; i < src_cnt; i++) { iop3xx_desc_init_xor()
530 src_cnt = 16; iop3xx_desc_init_xor()
537 for (i = 8; i < src_cnt; i++) { iop3xx_desc_init_xor()
542 src_cnt = 8; iop3xx_desc_init_xor()
546 for (i = 0; i < src_cnt; i++) { iop3xx_desc_init_xor()
551 if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4) iop3xx_desc_init_xor()
564 iop_desc_init_xor(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_xor() argument
567 iop3xx_desc_init_xor(desc->hw_desc, src_cnt, flags); iop_desc_init_xor()
572 iop_desc_init_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_zero_sum() argument
588 u_desc_ctrl.value = iop3xx_desc_init_xor(iter, src_cnt, flags); iop_desc_init_zero_sum()
609 iop_desc_init_null_xor(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_null_xor() argument
619 switch (src_cnt) { iop_desc_init_null_xor()
637 if (!u_desc_ctrl.field.blk_ctrl && src_cnt > 4) iop_desc_init_null_xor()
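The iop3xx hits decide how many descriptor slots an XOR needs by indexing a static table with src_cnt - 1 (line 336), and the init path at lines 506-551 caps src_cnt at 24, 16, or 8 depending on the descriptor's block-control mode. The sketch below shows only the table-lookup shape; the slot values are illustrative, not copied from the header.

    #include <stdio.h>

    /* Illustrative slots-per-operation table indexed by (src_cnt - 1). */
    static int xor_slot_count(int src_cnt, int *slots_per_op)
    {
        static const char slot_count_table[32] = {
            1, 1, 1, 1, 2, 2, 2, 2,   /*  1..8  sources */
            4, 4, 4, 4, 4, 4, 4, 4,   /*  9..16         */
            8, 8, 8, 8, 8, 8, 8, 8,   /* 17..24         */
            8, 8, 8, 8, 8, 8, 8, 8,   /* 25..32         */
        };

        *slots_per_op = slot_count_table[src_cnt - 1];
        return *slots_per_op;
    }

    int main(void)
    {
        int slots_per_op;

        printf("12 sources -> %d slot(s) per op\n", xor_slot_count(12, &slots_per_op));
        return 0;
    }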
/linux-4.1.27/drivers/dma/ppc4xx/
adma.c
127 int src_cnt, u32 addr);
193 unsigned int src_cnt) prep_dma_xor_dbg()
198 for (i = 0; i < src_cnt; i++) prep_dma_xor_dbg()
204 unsigned int src_cnt) prep_dma_pq_dbg()
209 for (i = 0; i < src_cnt; i++) prep_dma_pq_dbg()
217 unsigned int src_cnt, prep_dma_pqzero_sum_dbg()
224 for (i = 0; i < src_cnt; i++) prep_dma_pqzero_sum_dbg()
227 for (i = 0; i < src_cnt; i++) prep_dma_pqzero_sum_dbg()
233 pr_debug("\t0x%016llx ", src[src_cnt + i]); prep_dma_pqzero_sum_dbg()
276 desc->src_cnt = 0; ppc440spe_desc_init_null_xor()
284 int src_cnt, unsigned long flags) ppc440spe_desc_init_xor()
290 desc->src_cnt = src_cnt; ppc440spe_desc_init_xor()
293 hw_desc->cbc = XOR_CBCR_TGT_BIT | src_cnt; ppc440spe_desc_init_xor()
304 int dst_cnt, int src_cnt, unsigned long flags) ppc440spe_desc_init_dma2pq()
310 desc->src_cnt = src_cnt; ppc440spe_desc_init_dma2pq()
330 int dst_cnt, int src_cnt, unsigned long flags, ppc440spe_desc_init_dma01pq()
339 desc->src_cnt = src_cnt; ppc440spe_desc_init_dma01pq()
444 int dst_cnt, int src_cnt) ppc440spe_desc_init_dma01pqzero_sum()
470 iter->src_cnt = 0; ppc440spe_desc_init_dma01pqzero_sum()
474 * - <src_cnt> descriptors starting from 2nd or 3rd ppc440spe_desc_init_dma01pqzero_sum()
478 if (i++ < src_cnt) ppc440spe_desc_init_dma01pqzero_sum()
506 desc->src_cnt = src_cnt; ppc440spe_desc_init_dma01pqzero_sum()
520 desc->src_cnt = 1; ppc440spe_desc_init_memcpy()
805 static int ppc440spe_chan_xor_slot_count(size_t len, int src_cnt, ppc440spe_chan_xor_slot_count() argument
811 slot_cnt = *slots_per_op = (src_cnt + XOR_MAX_OPS - 1)/XOR_MAX_OPS; ppc440spe_chan_xor_slot_count()
827 int src_cnt, size_t len) ppc440spe_dma2_pq_slot_count()
833 for (i = 1; i < src_cnt; i++) { ppc440spe_dma2_pq_slot_count()
842 if (i == src_cnt-1) ppc440spe_dma2_pq_slot_count()
848 if (i == src_cnt-1) ppc440spe_dma2_pq_slot_count()
855 if (i == src_cnt-2 || (order == -1 ppc440spe_dma2_pq_slot_count()
862 if (i == src_cnt-1) ppc440spe_dma2_pq_slot_count()
866 if (i == src_cnt-1) ppc440spe_dma2_pq_slot_count()
870 if (i == src_cnt-1) ppc440spe_dma2_pq_slot_count()
887 if (src_cnt <= 1 || (state != 1 && state != 2)) { ppc440spe_dma2_pq_slot_count()
888 pr_err("%s: src_cnt=%d, state=%d, addr_count=%d, order=%lld\n", ppc440spe_dma2_pq_slot_count()
889 __func__, src_cnt, state, addr_count, order); ppc440spe_dma2_pq_slot_count()
890 for (i = 0; i < src_cnt; i++) ppc440spe_dma2_pq_slot_count()
1265 static int ppc440spe_can_rxor(struct page **srcs, int src_cnt, size_t len) ppc440spe_can_rxor() argument
1270 if (unlikely(!(src_cnt > 1))) ppc440spe_can_rxor()
1273 BUG_ON(src_cnt > ARRAY_SIZE(ppc440spe_rxor_srcs)); ppc440spe_can_rxor()
1276 for (i = 0; i < src_cnt; i++) { ppc440spe_can_rxor()
1281 src_cnt = idx; ppc440spe_can_rxor()
1283 for (i = 1; i < src_cnt; i++) { ppc440spe_can_rxor()
1301 if ((i == src_cnt - 2) || ppc440spe_can_rxor()
1337 * @src_cnt: number of source operands
1342 struct page **src_lst, int src_cnt, size_t src_sz) ppc440spe_adma_estimate()
1365 if (dst_cnt == 1 && src_cnt == 2 && dst_lst[0] == src_lst[1]) ppc440spe_adma_estimate()
1367 else if (ppc440spe_can_rxor(src_lst, src_cnt, src_sz)) ppc440spe_adma_estimate()
1384 int src_cnt, size_t src_sz) ppc440spe_async_tx_find_best_channel()
1400 if (src_cnt == 1 && dst_lst[1] == src_lst[0]) ppc440spe_async_tx_find_best_channel()
1402 if (src_cnt == 2 && dst_lst[1] == src_lst[1]) ppc440spe_async_tx_find_best_channel()
1418 dst_cnt, src_lst, src_cnt, src_sz); ppc440spe_async_tx_find_best_channel()
1440 if (entry_idx < 0 || entry_idx >= (tdesc->src_cnt + tdesc->dst_cnt)) { ppc440spe_get_group_entry()
1441 printk("%s: entry_idx %d, src_cnt %d, dst_cnt %d\n", ppc440spe_get_group_entry()
1442 __func__, entry_idx, tdesc->src_cnt, tdesc->dst_cnt); ppc440spe_get_group_entry()
2035 dma_addr_t *dma_src, u32 src_cnt, size_t len, ppc440spe_adma_prep_dma_xor()
2045 dma_dest, dma_src, src_cnt)); ppc440spe_adma_prep_dma_xor()
2051 "ppc440spe adma%d: %s src_cnt: %d len: %u int_en: %d\n", ppc440spe_adma_prep_dma_xor()
2052 ppc440spe_chan->device->id, __func__, src_cnt, len, ppc440spe_adma_prep_dma_xor()
2056 slot_cnt = ppc440spe_chan_xor_slot_count(len, src_cnt, &slots_per_op); ppc440spe_adma_prep_dma_xor()
2061 ppc440spe_desc_init_xor(group_start, src_cnt, flags); ppc440spe_adma_prep_dma_xor()
2063 while (src_cnt--) ppc440spe_adma_prep_dma_xor()
2065 dma_src[src_cnt], src_cnt); ppc440spe_adma_prep_dma_xor()
2077 int src_cnt);
2085 dma_addr_t *src, int src_cnt) ppc440spe_adma_init_dma2rxor_slot()
2090 for (i = 0; i < src_cnt; i++) { ppc440spe_adma_init_dma2rxor_slot()
2092 desc->src_cnt, (u32)src[i]); ppc440spe_adma_init_dma2rxor_slot()
2102 dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, ppc440spe_dma01_prep_mult()
2123 sw_desc->src_cnt = src_cnt; ppc440spe_dma01_prep_mult()
2188 dma_addr_t *dst, dma_addr_t *src, int src_cnt, ppc440spe_dma01_prep_sum_product()
2209 sw_desc->src_cnt = src_cnt; ppc440spe_dma01_prep_sum_product()
2292 dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, ppc440spe_dma01_prep_pq()
2300 pr_debug("%s: dst_cnt %d, src_cnt %d, len %d\n", ppc440spe_dma01_prep_pq()
2301 __func__, dst_cnt, src_cnt, len); ppc440spe_dma01_prep_pq()
2314 if (src_cnt > 1 && ppc440spe_dma01_prep_pq()
2319 if (src_cnt != 2) { ppc440spe_dma01_prep_pq()
2359 slot_cnt = src_cnt; ppc440spe_dma01_prep_pq()
2371 * need (src_cnt - (2 or 3)) for WXOR of sources ppc440spe_dma01_prep_pq()
2382 slot_cnt += src_cnt - 2; ppc440spe_dma01_prep_pq()
2384 slot_cnt += src_cnt - 3; ppc440spe_dma01_prep_pq()
2398 ppc440spe_desc_init_dma01pq(sw_desc, dst_cnt, src_cnt, ppc440spe_dma01_prep_pq()
2405 while (src_cnt--) { ppc440spe_dma01_prep_pq()
2406 ppc440spe_adma_pq_set_src(sw_desc, src[src_cnt], ppc440spe_dma01_prep_pq()
2407 src_cnt); ppc440spe_dma01_prep_pq()
2416 mult = scf[src_cnt]; ppc440spe_dma01_prep_pq()
2418 mult, src_cnt, dst_cnt - 1); ppc440spe_dma01_prep_pq()
2437 dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, ppc440spe_dma2_prep_pq()
2446 /*pr_debug("%s: dst_cnt %d, src_cnt %d, len %d\n", ppc440spe_dma2_prep_pq()
2447 __func__, dst_cnt, src_cnt, len);*/ ppc440spe_dma2_prep_pq()
2450 descs_per_op = ppc440spe_dma2_pq_slot_count(src, src_cnt, len); ppc440spe_dma2_prep_pq()
2464 ppc440spe_desc_init_dma2pq(iter, dst_cnt, src_cnt, ppc440spe_dma2_prep_pq()
2479 src_cnt); ppc440spe_dma2_prep_pq()
2504 while (src_cnt--) { ppc440spe_dma2_prep_pq()
2508 ppc440spe_adma_pq_set_src(sw_desc, src[src_cnt], ppc440spe_dma2_prep_pq()
2509 src_cnt); ppc440spe_dma2_prep_pq()
2511 mult = scf[src_cnt]; ppc440spe_dma2_prep_pq()
2513 mult, src_cnt, dst_cnt - 1); ppc440spe_dma2_prep_pq()
2526 unsigned int src_cnt, const unsigned char *scf, ppc440spe_adma_prep_dma_pq()
2536 dst, src, src_cnt)); ppc440spe_adma_prep_dma_pq()
2539 BUG_ON(!src_cnt); ppc440spe_adma_prep_dma_pq()
2541 if (src_cnt == 1 && dst[1] == src[0]) { ppc440spe_adma_prep_dma_pq()
2549 dest, 2, src, src_cnt, scf, len, flags); ppc440spe_adma_prep_dma_pq()
2553 if (src_cnt == 2 && dst[1] == src[1]) { ppc440spe_adma_prep_dma_pq()
2574 "ppc440spe adma%d: %s src_cnt: %d len: %u int_en: %d\n", ppc440spe_adma_prep_dma_pq()
2575 ppc440spe_chan->device->id, __func__, src_cnt, len, ppc440spe_adma_prep_dma_pq()
2582 dst, dst_cnt, src, src_cnt, scf, ppc440spe_adma_prep_dma_pq()
2588 dst, dst_cnt, src, src_cnt, scf, ppc440spe_adma_prep_dma_pq()
2602 unsigned int src_cnt, const unsigned char *scf, size_t len, ppc440spe_adma_prep_dma_pqzero_sum()
2623 src, src_cnt, scf)); ppc440spe_adma_prep_dma_pqzero_sum()
2633 slot_cnt = src_cnt + dst_cnt * 2; ppc440spe_adma_prep_dma_pqzero_sum()
2640 ppc440spe_desc_init_dma01pqzero_sum(sw_desc, dst_cnt, src_cnt); ppc440spe_adma_prep_dma_pqzero_sum()
2662 iter->src_cnt = 0; ppc440spe_adma_prep_dma_pqzero_sum()
2694 iter->src_cnt = 0; ppc440spe_adma_prep_dma_pqzero_sum()
2758 src[src_cnt - 1]); ppc440spe_adma_prep_dma_pqzero_sum()
2765 scf[src_cnt - 1]); ppc440spe_adma_prep_dma_pqzero_sum()
2767 if (!(--src_cnt)) ppc440spe_adma_prep_dma_pqzero_sum()
2780 struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, ppc440spe_adma_prep_dma_xor_zero_sum()
2792 src_cnt - 1, 0, len, ppc440spe_adma_prep_dma_xor_zero_sum()
3102 int src_cnt) ppc440spe_desc_set_xor_src_cnt()
3107 hw_desc->cbc |= src_cnt; ppc440spe_desc_set_xor_src_cnt()
3240 struct ppc440spe_rxor *cursor, int index, int src_cnt) ppc440spe_adma_dma2rxor_inc_addr()
3243 if (index == src_cnt - 1) { ppc440spe_adma_dma2rxor_inc_addr()
3258 int src_cnt, u32 addr) ppc440spe_adma_dma2rxor_prep_src()
3277 if (index == src_cnt-1) { ppc440spe_adma_dma2rxor_prep_src()
3282 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3289 if (index == src_cnt-1) { ppc440spe_adma_dma2rxor_prep_src()
3294 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3306 if (index == src_cnt-2 || (sign == -1 ppc440spe_adma_dma2rxor_prep_src()
3315 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3322 if (index == src_cnt-1) { ppc440spe_adma_dma2rxor_prep_src()
3324 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3332 if (index == src_cnt-1) { ppc440spe_adma_dma2rxor_prep_src()
3334 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3342 if (index == src_cnt-1) { ppc440spe_adma_dma2rxor_prep_src()
3344 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3354 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
3363 desc, cursor, index, src_cnt); ppc440spe_adma_dma2rxor_prep_src()
192 prep_dma_xor_dbg(int id, dma_addr_t dst, dma_addr_t *src, unsigned int src_cnt) prep_dma_xor_dbg() argument
203 prep_dma_pq_dbg(int id, dma_addr_t *dst, dma_addr_t *src, unsigned int src_cnt) prep_dma_pq_dbg() argument
216 prep_dma_pqzero_sum_dbg(int id, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf) prep_dma_pqzero_sum_dbg() argument
283 ppc440spe_desc_init_xor(struct ppc440spe_adma_desc_slot *desc, int src_cnt, unsigned long flags) ppc440spe_desc_init_xor() argument
303 ppc440spe_desc_init_dma2pq(struct ppc440spe_adma_desc_slot *desc, int dst_cnt, int src_cnt, unsigned long flags) ppc440spe_desc_init_dma2pq() argument
329 ppc440spe_desc_init_dma01pq(struct ppc440spe_adma_desc_slot *desc, int dst_cnt, int src_cnt, unsigned long flags, unsigned long op) ppc440spe_desc_init_dma01pq() argument
442 ppc440spe_desc_init_dma01pqzero_sum( struct ppc440spe_adma_desc_slot *desc, int dst_cnt, int src_cnt) ppc440spe_desc_init_dma01pqzero_sum() argument
826 ppc440spe_dma2_pq_slot_count(dma_addr_t *srcs, int src_cnt, size_t len) ppc440spe_dma2_pq_slot_count() argument
1340 ppc440spe_adma_estimate(struct dma_chan *chan, enum dma_transaction_type cap, struct page **dst_lst, int dst_cnt, struct page **src_lst, int src_cnt, size_t src_sz) ppc440spe_adma_estimate() argument
1382 ppc440spe_async_tx_find_best_channel(enum dma_transaction_type cap, struct page **dst_lst, int dst_cnt, struct page **src_lst, int src_cnt, size_t src_sz) ppc440spe_async_tx_find_best_channel() argument
2033 ppc440spe_adma_prep_dma_xor( struct dma_chan *chan, dma_addr_t dma_dest, dma_addr_t *dma_src, u32 src_cnt, size_t len, unsigned long flags) ppc440spe_adma_prep_dma_xor() argument
2083 ppc440spe_adma_init_dma2rxor_slot( struct ppc440spe_adma_desc_slot *desc, dma_addr_t *src, int src_cnt) ppc440spe_adma_init_dma2rxor_slot() argument
2100 ppc440spe_dma01_prep_mult( struct ppc440spe_adma_chan *ppc440spe_chan, dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_dma01_prep_mult() argument
2186 ppc440spe_dma01_prep_sum_product( struct ppc440spe_adma_chan *ppc440spe_chan, dma_addr_t *dst, dma_addr_t *src, int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_dma01_prep_sum_product() argument
2290 ppc440spe_dma01_prep_pq( struct ppc440spe_adma_chan *ppc440spe_chan, dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_dma01_prep_pq() argument
2435 ppc440spe_dma2_prep_pq( struct ppc440spe_adma_chan *ppc440spe_chan, dma_addr_t *dst, int dst_cnt, dma_addr_t *src, int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_dma2_prep_pq() argument
2524 ppc440spe_adma_prep_dma_pq( struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) ppc440spe_adma_prep_dma_pq() argument
2600 ppc440spe_adma_prep_dma_pqzero_sum( struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, enum sum_check_flags *pqres, unsigned long flags) ppc440spe_adma_prep_dma_pqzero_sum() argument
2779 ppc440spe_adma_prep_dma_xor_zero_sum( struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt, size_t len, enum sum_check_flags *result, unsigned long flags) ppc440spe_adma_prep_dma_xor_zero_sum() argument
3100 ppc440spe_desc_set_xor_src_cnt( struct ppc440spe_adma_desc_slot *desc, int src_cnt) ppc440spe_desc_set_xor_src_cnt() argument
3238 ppc440spe_adma_dma2rxor_inc_addr( struct ppc440spe_adma_desc_slot *desc, struct ppc440spe_rxor *cursor, int index, int src_cnt) ppc440spe_adma_dma2rxor_inc_addr() argument
3255 ppc440spe_adma_dma2rxor_prep_src( struct ppc440spe_adma_desc_slot *hdesc, struct ppc440spe_rxor *cursor, int index, int src_cnt, u32 addr) ppc440spe_adma_dma2rxor_prep_src() argument
adma.h
136 * @src_cnt: number of sources set in this descriptor
159 u8 src_cnt; member in struct:ppc440spe_adma_desc_slot
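One detail worth pulling out of the ppc440spe hits: line 811 sizes the operation with the round-up division idiom slot_cnt = *slots_per_op = (src_cnt + XOR_MAX_OPS - 1) / XOR_MAX_OPS. A tiny worked example of that arithmetic, with XOR_MAX_OPS chosen arbitrarily for illustration:

    #include <stdio.h>

    #define XOR_MAX_OPS 16   /* illustrative per-descriptor source limit */

    int main(void)
    {
        for (int src_cnt = 1; src_cnt <= 40; src_cnt += 13) {
            /* ceiling division: descriptors needed to cover src_cnt sources */
            int slot_cnt = (src_cnt + XOR_MAX_OPS - 1) / XOR_MAX_OPS;

            printf("src_cnt=%2d -> slot_cnt=%d\n", src_cnt, slot_cnt);
        }
        return 0;
    }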
/linux-4.1.27/drivers/dma/
iop-adma.c
550 dma_addr_t *dma_src, unsigned int src_cnt, size_t len, iop_adma_prep_dma_xor()
562 "%s src_cnt: %d len: %u flags: %lx\n", iop_adma_prep_dma_xor()
563 __func__, src_cnt, len, flags); iop_adma_prep_dma_xor()
566 slot_cnt = iop_chan_xor_slot_count(len, src_cnt, &slots_per_op); iop_adma_prep_dma_xor()
570 iop_desc_init_xor(grp_start, src_cnt, flags); iop_adma_prep_dma_xor()
574 while (src_cnt--) iop_adma_prep_dma_xor()
575 iop_desc_set_xor_src_addr(grp_start, src_cnt, iop_adma_prep_dma_xor()
576 dma_src[src_cnt]); iop_adma_prep_dma_xor()
585 unsigned int src_cnt, size_t len, u32 *result, iop_adma_prep_dma_xor_val()
595 dev_dbg(iop_chan->device->common.dev, "%s src_cnt: %d len: %u\n", iop_adma_prep_dma_xor_val()
596 __func__, src_cnt, len); iop_adma_prep_dma_xor_val()
599 slot_cnt = iop_chan_zero_sum_slot_count(len, src_cnt, &slots_per_op); iop_adma_prep_dma_xor_val()
603 iop_desc_init_zero_sum(grp_start, src_cnt, flags); iop_adma_prep_dma_xor_val()
609 while (src_cnt--) iop_adma_prep_dma_xor_val()
610 iop_desc_set_zero_sum_src_addr(grp_start, src_cnt, iop_adma_prep_dma_xor_val()
611 dma_src[src_cnt]); iop_adma_prep_dma_xor_val()
620 unsigned int src_cnt, const unsigned char *scf, size_t len, iop_adma_prep_dma_pq()
633 "%s src_cnt: %d len: %u flags: %lx\n", iop_adma_prep_dma_pq()
634 __func__, src_cnt, len, flags); iop_adma_prep_dma_pq()
637 continue_srcs = 1+src_cnt; iop_adma_prep_dma_pq()
639 continue_srcs = 3+src_cnt; iop_adma_prep_dma_pq()
641 continue_srcs = 0+src_cnt; iop_adma_prep_dma_pq()
661 for (i = 0; i < src_cnt; i++) iop_adma_prep_dma_pq()
684 unsigned int src_cnt, const unsigned char *scf, iop_adma_prep_dma_pq_val()
696 dev_dbg(iop_chan->device->common.dev, "%s src_cnt: %d len: %u\n", iop_adma_prep_dma_pq_val()
697 __func__, src_cnt, len); iop_adma_prep_dma_pq_val()
700 slot_cnt = iop_chan_pq_zero_sum_slot_count(len, src_cnt + 2, &slots_per_op); iop_adma_prep_dma_pq_val()
706 int pq_idx = src_cnt; iop_adma_prep_dma_pq_val()
709 iop_desc_init_pq_zero_sum(g, src_cnt+2, flags); iop_adma_prep_dma_pq_val()
715 while (src_cnt--) iop_adma_prep_dma_pq_val()
716 iop_desc_set_pq_zero_sum_src_addr(g, src_cnt, iop_adma_prep_dma_pq_val()
717 src[src_cnt], iop_adma_prep_dma_pq_val()
718 scf[src_cnt]); iop_adma_prep_dma_pq_val()
549 iop_adma_prep_dma_xor(struct dma_chan *chan, dma_addr_t dma_dest, dma_addr_t *dma_src, unsigned int src_cnt, size_t len, unsigned long flags) iop_adma_prep_dma_xor() argument
584 iop_adma_prep_dma_xor_val(struct dma_chan *chan, dma_addr_t *dma_src, unsigned int src_cnt, size_t len, u32 *result, unsigned long flags) iop_adma_prep_dma_xor_val() argument
619 iop_adma_prep_dma_pq(struct dma_chan *chan, dma_addr_t *dst, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) iop_adma_prep_dma_pq() argument
683 iop_adma_prep_dma_pq_val(struct dma_chan *chan, dma_addr_t *pq, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, enum sum_check_flags *pqres, unsigned long flags) iop_adma_prep_dma_pq_val() argument
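The prep routines above (iop_adma_prep_dma_xor() at lines 574-576, and the mv_xor and ppc440spe equivalents) all program source addresses with the same reverse-walk idiom: while (src_cnt--) set_src(desc, src_cnt, src[src_cnt]). A small standalone demonstration of why the post-decrement visits indices src_cnt-1 down to 0 exactly once:

    #include <stdio.h>

    static void set_src(unsigned int slot, unsigned long addr)
    {
        printf("slot %u <- %#lx\n", slot, addr);
    }

    int main(void)
    {
        unsigned long dma_src[] = { 0x1000, 0x2000, 0x3000, 0x4000 };
        unsigned int src_cnt = 4;

        /* Post-decrement: the test sees the old value, so the body runs with
         * src_cnt = 3, 2, 1, 0 and the loop exits once the tested value is 0. */
        while (src_cnt--)
            set_src(src_cnt, dma_src[src_cnt]);

        return 0;
    }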
fsl_raid.c
326 unsigned int src_cnt, const unsigned char *scf, size_t len, fsl_re_prep_dma_genq()
335 unsigned int save_src_cnt = src_cnt; fsl_re_prep_dma_genq()
351 src_cnt += 1; fsl_re_prep_dma_genq()
356 cdb |= (src_cnt - 1) << FSL_RE_CDB_NRCS_SHIFT; fsl_re_prep_dma_genq()
371 for (i = 0; i < src_cnt; i++) fsl_re_prep_dma_genq()
401 unsigned int src_cnt, size_t len, unsigned long flags) fsl_re_prep_dma_xor()
404 return fsl_re_prep_dma_genq(chan, dest, src, src_cnt, NULL, len, flags); fsl_re_prep_dma_xor()
413 unsigned int src_cnt, const unsigned char *scf, size_t len, fsl_re_prep_dma_pq()
423 unsigned int save_src_cnt = src_cnt; fsl_re_prep_dma_pq()
437 if (src_cnt == 1) { fsl_re_prep_dma_pq()
463 return fsl_re_prep_dma_genq(chan, dest[1], src, src_cnt, fsl_re_prep_dma_pq()
467 src_cnt += 3; fsl_re_prep_dma_pq()
475 cdb |= (src_cnt - 1) << FSL_RE_CDB_NRCS_SHIFT; fsl_re_prep_dma_pq()
485 for (i = 0; i < src_cnt; i++) fsl_re_prep_dma_pq()
489 gfmq_len = ALIGN(src_cnt, 4); fsl_re_prep_dma_pq()
493 for (i = 0; i < src_cnt; i++) fsl_re_prep_dma_pq()
510 if (src_cnt - save_src_cnt == 3) { fsl_re_prep_dma_pq()
324 fsl_re_prep_dma_genq( struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) fsl_re_prep_dma_genq() argument
399 fsl_re_prep_dma_xor( struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) fsl_re_prep_dma_xor() argument
411 fsl_re_prep_dma_pq( struct dma_chan *chan, dma_addr_t *dest, dma_addr_t *src, unsigned int src_cnt, const unsigned char *scf, size_t len, unsigned long flags) fsl_re_prep_dma_pq() argument
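Two encodings stand out in the fsl_raid hits: the source count goes into the command descriptor as (src_cnt - 1) shifted into the NRCS field (lines 356 and 475), and the GF multiplier area is padded with ALIGN(src_cnt, 4) (line 489). The sketch below writes both out with a placeholder shift value and a locally defined ALIGN, since neither constant appears in the hits.

    #include <stdint.h>
    #include <stdio.h>

    #define NRCS_SHIFT  16u                          /* placeholder, not the real FSL_RE_CDB_NRCS_SHIFT */
    #define ALIGN(x, a) (((x) + (a) - 1) & ~((a) - 1))

    int main(void)
    {
        unsigned int src_cnt = 6;
        uint32_t cdb = 0;

        cdb |= (src_cnt - 1) << NRCS_SHIFT;          /* hardware counts sources as n - 1 */
        unsigned int gfmq_len = ALIGN(src_cnt, 4u);  /* coefficient list padded to a multiple of 4 */

        printf("cdb=%#x gfmq_len=%u\n", (unsigned)cdb, gfmq_len);
        return 0;
    }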
dmatest.c
416 int src_cnt; dmatest_func() local
433 src_cnt = dst_cnt = 1; dmatest_func()
436 src_cnt = min_odd(params->xor_sources | 1, dev->max_xor); dmatest_func()
440 src_cnt = min_odd(params->pq_sources | 1, dma_maxpq(dev, 0)); dmatest_func()
447 for (i = 0; i < src_cnt; i++) dmatest_func()
452 thread->srcs = kcalloc(src_cnt+1, sizeof(u8 *), GFP_KERNEL); dmatest_func()
455 for (i = 0; i < src_cnt; i++) { dmatest_func()
484 dma_addr_t srcs[src_cnt]; dmatest_func()
532 um = dmaengine_get_unmap_data(dev->dev, src_cnt+dst_cnt, dmatest_func()
542 for (i = 0; i < src_cnt; i++) { dmatest_func()
561 dsts = &um->addr[src_cnt]; dmatest_func()
587 srcs, src_cnt, dmatest_func()
595 src_cnt, pq_coefs, dmatest_func()
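In the dmatest hits, the XOR and PQ source counts come from src_cnt = min_odd(params->xor_sources | 1, dev->max_xor): OR-ing with 1 forces an odd request, and min_odd() presumably clamps it to the device limit while keeping it odd (with an odd count, XOR-ing identical patterns reproduces the pattern rather than cancelling to zero, which keeps verification simple). min_odd() itself is not shown in the hits, so the helper below is a hedged guess at its behaviour.

    #include <stdio.h>

    /* Hedged guess at min_odd(): take the smaller value, then drop to odd if needed. */
    static unsigned int min_odd(unsigned int x, unsigned int y)
    {
        unsigned int val = x < y ? x : y;

        return (val % 2) ? val : val - 1;
    }

    int main(void)
    {
        unsigned int xor_sources = 8;   /* illustrative module parameter */
        unsigned int max_xor = 5;       /* illustrative device limit */
        unsigned int src_cnt = min_odd(xor_sources | 1, max_xor);

        printf("requested %u, device max %u -> src_cnt %u\n",
               xor_sources | 1, max_xor, src_cnt);
        return 0;
    }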
xgene-dma.c
405 static u8 xgene_dma_encode_xor_flyby(u32 src_cnt) xgene_dma_encode_xor_flyby() argument
416 return flyby_type[src_cnt]; xgene_dma_encode_xor_flyby()
510 u32 src_cnt, size_t *nbytes, xgene_dma_prep_xor_desc()
531 XGENE_DMA_DESC_FLYBY_SET(desc1, xgene_dma_encode_xor_flyby(src_cnt)); xgene_dma_prep_xor_desc()
534 for (i = 0; i < src_cnt; i++) { xgene_dma_prep_xor_desc()
1102 u32 src_cnt, size_t len, unsigned long flags) xgene_dma_prep_xor()
1122 src_cnt, &len, multi); xgene_dma_prep_xor()
1150 u32 src_cnt, const u8 *scf, size_t len, unsigned long flags) xgene_dma_prep_pq()
1168 memcpy(_src, src, sizeof(*src) * src_cnt); xgene_dma_prep_pq()
1197 src_cnt, &len, multi); xgene_dma_prep_pq()
1207 src_cnt, &_len, scf); xgene_dma_prep_pq()
507 xgene_dma_prep_xor_desc(struct xgene_dma_chan *chan, struct xgene_dma_desc_sw *desc_sw, dma_addr_t *dst, dma_addr_t *src, u32 src_cnt, size_t *nbytes, const u8 *scf) xgene_dma_prep_xor_desc() argument
1100 xgene_dma_prep_xor( struct dma_chan *dchan, dma_addr_t dst, dma_addr_t *src, u32 src_cnt, size_t len, unsigned long flags) xgene_dma_prep_xor() argument
1148 xgene_dma_prep_pq( struct dma_chan *dchan, dma_addr_t *dst, dma_addr_t *src, u32 src_cnt, const u8 *scf, size_t len, unsigned long flags) xgene_dma_prep_pq() argument
mv_xor.c
512 unsigned int src_cnt, size_t len, unsigned long flags) mv_xor_prep_dma_xor()
523 "%s src_cnt: %d len: %u dest %pad flags: %ld\n", mv_xor_prep_dma_xor()
524 __func__, src_cnt, len, &dest, flags); mv_xor_prep_dma_xor() local
532 while (src_cnt--) mv_xor_prep_dma_xor()
533 mv_desc_set_src_addr(sw_desc, src_cnt, src[src_cnt]); mv_xor_prep_dma_xor()
511 mv_xor_prep_dma_xor(struct dma_chan *chan, dma_addr_t dest, dma_addr_t *src, unsigned int src_cnt, size_t len, unsigned long flags) mv_xor_prep_dma_xor() argument
/linux-4.1.27/arch/arm/mach-iop13xx/include/mach/
adma.h
201 iop_chan_xor_slot_count(size_t len, int src_cnt, int *slots_per_op) iop_chan_xor_slot_count() argument
208 *slots_per_op = slot_count_table[src_cnt - 1]; iop_chan_xor_slot_count()
278 iop_desc_init_xor(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_xor() argument
288 u_desc_ctrl.field.src_select = src_cnt - 1; iop_desc_init_xor()
299 iop_desc_init_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_zero_sum() argument
309 u_desc_ctrl.field.src_select = src_cnt - 1; iop_desc_init_zero_sum()
321 iop_desc_init_pq(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_pq() argument
331 u_desc_ctrl.field.src_select = src_cnt - 1; iop_desc_init_pq()
340 iop_desc_init_pq_zero_sum(struct iop_adma_desc_slot *desc, int src_cnt, iop_desc_init_pq_zero_sum() argument
350 u_desc_ctrl.field.src_select = src_cnt - 1; iop_desc_init_pq_zero_sum()
/linux-4.1.27/include/linux/
async_tx.h
176 int src_cnt, size_t len, struct async_submit_ctl *submit);
180 int src_cnt, size_t len, enum sum_check_flags *result,
191 async_gen_syndrome(struct page **blocks, unsigned int offset, int src_cnt,
195 async_syndrome_val(struct page **blocks, unsigned int offset, int src_cnt,
dmaengine.h
641 unsigned int src_cnt, size_t len, unsigned long flags);
643 struct dma_chan *chan, dma_addr_t *src, unsigned int src_cnt,
647 unsigned int src_cnt, const unsigned char *scf,
651 unsigned int src_cnt, const unsigned char *scf, size_t len,
/linux-4.1.27/fs/btrfs/
raid56.c
513 static void run_xor(void **pages, int src_cnt, ssize_t len) run_xor() argument
517 void *dest = pages[src_cnt]; run_xor()
519 while(src_cnt > 0) { run_xor()
520 xor_src_cnt = min(src_cnt, MAX_XOR_BLOCKS); run_xor()
523 src_cnt -= xor_src_cnt; run_xor()
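The btrfs run_xor() hits show the software fallback most plainly: the destination sits one slot past the sources (pages[src_cnt], line 517) and the sources are folded in at most MAX_XOR_BLOCKS at a time (lines 519-523). The lines between the matches are not shown, so the version below fills the gap with an assumed xor_chunk() helper and a source-offset counter; treat it as a sketch of the pattern, not the btrfs code itself.

    #include <stddef.h>

    #define MAX_XOR_BLOCKS 8   /* assumed per-call limit of the underlying XOR helper */

    /* Assumed helper: XOR 'count' source buffers of 'len' bytes into 'dest'. */
    static void xor_chunk(int count, size_t len, void *dest, void **srcs)
    {
        unsigned char *d = dest;

        for (int s = 0; s < count; s++) {
            const unsigned char *p = srcs[s];

            for (size_t i = 0; i < len; i++)
                d[i] ^= p[i];
        }
    }

    /* Sketch of the run_xor() pattern: pages[0..src_cnt-1] are sources,
     * pages[src_cnt] is the destination. */
    static void run_xor(void **pages, int src_cnt, size_t len)
    {
        void *dest = pages[src_cnt];
        int src_off = 0;

        while (src_cnt > 0) {
            int xor_src_cnt = src_cnt < MAX_XOR_BLOCKS ? src_cnt : MAX_XOR_BLOCKS;

            xor_chunk(xor_src_cnt, len, dest, pages + src_off);

            src_cnt -= xor_src_cnt;
            src_off += xor_src_cnt;
        }
    }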
/linux-4.1.27/drivers/crypto/
hifn_795x.c
579 volatile __le16 src_cnt; /* 15:0 of source count */ member in struct:hifn_base_result

Completed in 1264 milliseconds