Lines Matching refs:sg
83 struct scatterlist *sg, unsigned int nents);
251 static void rds_iw_set_scatterlist(struct rds_iw_scatterlist *sg, argument
254 sg->list = list;
255 sg->len = sg_len;
256 sg->dma_len = 0;
257 sg->dma_npages = 0;
258 sg->bytes = 0;
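
Taken together, the matches at 251 and 254-258 account for the whole of rds_iw_set_scatterlist(): it records the caller's scatterlist and entry count and zeroes the DMA bookkeeping fields. A minimal reconstruction follows; that list and sg_len are the remaining two parameters is inferred from the assignments on 254-255, and the struct types come from the RDS iWARP private header ("iw.h" in net/rds/).

/* Reconstruction from the matches above; parameter names are inferred
 * from the assignments on lines 254-255. */
static void rds_iw_set_scatterlist(struct rds_iw_scatterlist *sg,
		struct scatterlist *list, unsigned int sg_len)
{
	sg->list = list;	/* caller's scatterlist */
	sg->len = sg_len;	/* number of entries, not yet DMA-mapped */
	sg->dma_len = 0;	/* set later by ib_dma_map_sg() */
	sg->dma_npages = 0;
	sg->bytes = 0;
}
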
262 struct rds_iw_scatterlist *sg) argument
268 WARN_ON(sg->dma_len);
270 sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
271 if (unlikely(!sg->dma_len)) {
276 sg->bytes = 0;
277 sg->dma_npages = 0;
280 for (i = 0; i < sg->dma_len; ++i) {
281 unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);
282 u64 dma_addr = ib_sg_dma_address(dev, &sg->list[i]);
285 sg->bytes += dma_len;
294 if (i < sg->dma_len - 1)
299 sg->dma_npages += (end_addr - dma_addr) >> PAGE_SHIFT;
303 if (sg->dma_npages > fastreg_message_size)
306 dma_pages = kmalloc(sizeof(u64) * sg->dma_npages, GFP_ATOMIC);
312 for (i = j = 0; i < sg->dma_len; ++i) {
313 unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);
314 u64 dma_addr = ib_sg_dma_address(dev, &sg->list[i]);
321 BUG_ON(j > sg->dma_npages);
327 ib_dma_unmap_sg(rds_iwdev->dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
328 sg->dma_len = 0;
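
The matches from 262 through 328 all fall inside the scatterlist-mapping helper: DMA-map the list, make one pass to total the bytes and count the pages it spans, refuse anything larger than fastreg_message_size, then make a second pass to fill a page-address array, unmapping again on any failure. The sketch below reconstructs that flow from the matches only; the local names (dev, dma_pages, ret), the error labels, the return-value choices, and the condensed page-alignment checks between 282 and 299 are assumptions, not a verbatim copy of the source.

/* Sketch; context is net/rds/iw_rdma.c, using <rdma/ib_verbs.h>,
 * <linux/slab.h>, <linux/mm.h> and the rds_iw_* types from "iw.h". */
static u64 *rds_iw_map_scatterlist(struct rds_iw_device *rds_iwdev,
		struct rds_iw_scatterlist *sg)
{
	struct ib_device *dev = rds_iwdev->dev;
	u64 *dma_pages = NULL;
	int i, j, ret;

	WARN_ON(sg->dma_len);		/* must not already be mapped */

	sg->dma_len = ib_dma_map_sg(dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
	if (unlikely(!sg->dma_len)) {
		ret = -EBUSY;
		goto out;
	}

	sg->bytes = 0;
	sg->dma_npages = 0;

	/* First pass: total the bytes and count the pages spanned. */
	ret = -EINVAL;
	for (i = 0; i < sg->dma_len; ++i) {
		unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);
		u64 dma_addr = ib_sg_dma_address(dev, &sg->list[i]);
		u64 end_addr = dma_addr + dma_len;

		sg->bytes += dma_len;

		/* Entries must tile whole pages: only the last entry may end
		 * short of a page boundary (the matching start-alignment
		 * check on the first entry is elided in this sketch). */
		if ((end_addr & (PAGE_SIZE - 1)) && i < sg->dma_len - 1)
			goto out_unmap;

		dma_addr = (dma_addr >> PAGE_SHIFT) << PAGE_SHIFT;
		end_addr = PAGE_ALIGN(end_addr);
		sg->dma_npages += (end_addr - dma_addr) >> PAGE_SHIFT;
	}

	/* The fastreg work request can only carry so many pages. */
	if (sg->dma_npages > fastreg_message_size)
		goto out_unmap;

	dma_pages = kmalloc(sizeof(u64) * sg->dma_npages, GFP_ATOMIC);
	if (!dma_pages) {
		ret = -ENOMEM;
		goto out_unmap;
	}

	/* Second pass: record the DMA address of every page. */
	for (i = j = 0; i < sg->dma_len; ++i) {
		unsigned int dma_len = ib_sg_dma_len(dev, &sg->list[i]);
		u64 dma_addr = ib_sg_dma_address(dev, &sg->list[i]);
		u64 end_addr = dma_addr + dma_len;
		u64 page_addr = (dma_addr >> PAGE_SHIFT) << PAGE_SHIFT;

		for (; page_addr < end_addr; page_addr += PAGE_SIZE)
			dma_pages[j++] = page_addr;
		BUG_ON(j > sg->dma_npages);
	}

	return dma_pages;

out_unmap:
	ib_dma_unmap_sg(rds_iwdev->dev, sg->list, sg->len, DMA_BIDIRECTIONAL);
	sg->dma_len = 0;
	kfree(dma_pages);
out:
	return ERR_PTR(ret);
}
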
591 void *rds_iw_get_mr(struct scatterlist *sg, unsigned long nents, argument
625 ret = rds_iw_map_fastreg(rds_iwdev->mr_pool, ibmr, sg, nents);
760 struct scatterlist *sg, argument
768 rds_iw_set_scatterlist(&mapping->m_sg, sg, sg_len);
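
The last four matches trace the call chain: rds_iw_get_mr() (591, 625) passes the application's sg/nents straight into rds_iw_map_fastreg() (760), which records them on the mapping via rds_iw_set_scatterlist() (768) before DMA-mapping. A sketch of the top of that function; the pool->device and ibmr->mapping field names, the return conventions, and everything after the mapping step (building and posting the fastreg work request) are assumptions or elided.

/* Sketch of how the matches at 591/625/760/768 connect; not verbatim. */
int rds_iw_map_fastreg(struct rds_iw_mr_pool *pool,
		       struct rds_iw_mr *ibmr,
		       struct scatterlist *sg, unsigned int sg_len)
{
	struct rds_iw_device *rds_iwdev = pool->device;	/* field name inferred */
	struct rds_iw_mapping *mapping = &ibmr->mapping;	/* field name inferred */
	u64 *dma_pages;

	rds_iw_set_scatterlist(&mapping->m_sg, sg, sg_len);	/* line 768 */

	dma_pages = rds_iw_map_scatterlist(rds_iwdev, &mapping->m_sg);
	if (IS_ERR(dma_pages))
		return PTR_ERR(dma_pages);

	/* fastreg work-request setup and posting elided in this sketch */
	return 0;
}
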