Lines matching refs:sge (all hits are in the qib InfiniBand driver's qib_verbs.c)

171 struct qib_sge *sge = &ss->sge; in qib_copy_sge() local
174 u32 len = sge->length; in qib_copy_sge()
178 if (len > sge->sge_length) in qib_copy_sge()
179 len = sge->sge_length; in qib_copy_sge()
181 memcpy(sge->vaddr, data, len); in qib_copy_sge()
182 sge->vaddr += len; in qib_copy_sge()
183 sge->length -= len; in qib_copy_sge()
184 sge->sge_length -= len; in qib_copy_sge()
185 if (sge->sge_length == 0) { in qib_copy_sge()
187 qib_put_mr(sge->mr); in qib_copy_sge()
189 *sge = *ss->sg_list++; in qib_copy_sge()
190 } else if (sge->length == 0 && sge->mr->lkey) { in qib_copy_sge()
191 if (++sge->n >= QIB_SEGSZ) { in qib_copy_sge()
192 if (++sge->m >= sge->mr->mapsz) in qib_copy_sge()
194 sge->n = 0; in qib_copy_sge()
196 sge->vaddr = in qib_copy_sge()
197 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_copy_sge()
198 sge->length = in qib_copy_sge()
199 sge->mr->map[sge->m]->segs[sge->n].length; in qib_copy_sge()
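The matches above show only the sge-touching lines of qib_copy_sge(); the enclosing loop, the release test around qib_put_mr() (line 187), and the data/length bookkeeping are filtered out of the listing. A minimal sketch of the whole routine follows, with the elided control flow reconstructed as assumptions (the while loop, the release flag, and the num_sge handling do not appear in the matches):

	/*
	 * Sketch: copy 'length' bytes of 'data' into the scatter/gather list
	 * described by ss.  Only the lines shown in the match listing are
	 * certain; the loop, the 'release' test and the trailing bookkeeping
	 * are assumptions.
	 */
	void qib_copy_sge(struct qib_sge_state *ss, void *data, u32 length, int release)
	{
		struct qib_sge *sge = &ss->sge;

		while (length) {
			u32 len = sge->length;

			if (len > length)		/* assumed: clamp to what is left */
				len = length;
			if (len > sge->sge_length)	/* clamp to the current SGE */
				len = sge->sge_length;
			memcpy(sge->vaddr, data, len);
			sge->vaddr += len;
			sge->length -= len;
			sge->sge_length -= len;
			if (sge->sge_length == 0) {
				/* This SGE is consumed: optionally drop its MR
				 * reference and move to the next list entry. */
				if (release)
					qib_put_mr(sge->mr);
				if (--ss->num_sge)
					*sge = *ss->sg_list++;
			} else if (sge->length == 0 && sge->mr->lkey) {
				/* End of the current MR map segment: step to the
				 * next segment of the region's page map. */
				if (++sge->n >= QIB_SEGSZ) {
					if (++sge->m >= sge->mr->mapsz)
						break;
					sge->n = 0;
				}
				sge->vaddr = sge->mr->map[sge->m]->segs[sge->n].vaddr;
				sge->length = sge->mr->map[sge->m]->segs[sge->n].length;
			}
			data += len;
			length -= len;
		}
	}

qib_skip_sge() (lines 213-240), qib_copy_from_sge() (lines 299-325) and update_sge() (lines 737-752) repeat the same advance pattern: skip omits the memcpy, copy_from copies in the opposite direction, and update_sge, used by the send paths, only advances the state after the caller has consumed the data directly from sge->vaddr.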
213 struct qib_sge *sge = &ss->sge; in qib_skip_sge() local
216 u32 len = sge->length; in qib_skip_sge()
220 if (len > sge->sge_length) in qib_skip_sge()
221 len = sge->sge_length; in qib_skip_sge()
223 sge->vaddr += len; in qib_skip_sge()
224 sge->length -= len; in qib_skip_sge()
225 sge->sge_length -= len; in qib_skip_sge()
226 if (sge->sge_length == 0) { in qib_skip_sge()
228 qib_put_mr(sge->mr); in qib_skip_sge()
230 *sge = *ss->sg_list++; in qib_skip_sge()
231 } else if (sge->length == 0 && sge->mr->lkey) { in qib_skip_sge()
232 if (++sge->n >= QIB_SEGSZ) { in qib_skip_sge()
233 if (++sge->m >= sge->mr->mapsz) in qib_skip_sge()
235 sge->n = 0; in qib_skip_sge()
237 sge->vaddr = in qib_skip_sge()
238 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_skip_sge()
239 sge->length = in qib_skip_sge()
240 sge->mr->map[sge->m]->segs[sge->n].length; in qib_skip_sge()
254 struct qib_sge sge = ss->sge; in qib_count_sge() local
259 u32 len = sge.length; in qib_count_sge()
263 if (len > sge.sge_length) in qib_count_sge()
264 len = sge.sge_length; in qib_count_sge()
266 if (((long) sge.vaddr & (sizeof(u32) - 1)) || in qib_count_sge()
272 sge.vaddr += len; in qib_count_sge()
273 sge.length -= len; in qib_count_sge()
274 sge.sge_length -= len; in qib_count_sge()
275 if (sge.sge_length == 0) { in qib_count_sge()
277 sge = *sg_list++; in qib_count_sge()
278 } else if (sge.length == 0 && sge.mr->lkey) { in qib_count_sge()
279 if (++sge.n >= QIB_SEGSZ) { in qib_count_sge()
280 if (++sge.m >= sge.mr->mapsz) in qib_count_sge()
282 sge.n = 0; in qib_count_sge()
284 sge.vaddr = in qib_count_sge()
285 sge.mr->map[sge.m]->segs[sge.n].vaddr; in qib_count_sge()
286 sge.length = in qib_count_sge()
287 sge.mr->map[sge.m]->segs[sge.n].length; in qib_count_sge()
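qib_count_sge() walks the same list read-only (note the by-value copy of ss->sge at line 254) to decide whether the payload can be described with send DMA descriptors; the deciding match is line 266. A hypothetical, standalone illustration of that rule is below; the struct and function are not in the driver and only restate the test:

	#include <linux/types.h>

	/* Hypothetical helper restating the line-266 test: every chunk must start
	 * on a dword boundary, and every chunk except the one that finishes the
	 * payload must also be a whole number of dwords; otherwise descriptors
	 * cannot be used (in the driver the descriptor count is dropped to zero).
	 */
	struct chunk {
		void *vaddr;
		u32 len;
	};

	static int chunks_fit_dword_descriptors(const struct chunk *c, int n, u32 total)
	{
		int i;

		for (i = 0; i < n && total; i++) {
			u32 len = c[i].len;

			if (len > total)
				len = total;
			if (((unsigned long) c[i].vaddr & (sizeof(u32) - 1)) ||
			    (len != total && (len & (sizeof(u32) - 1))))
				return 0;
			total -= len;
		}
		return 1;
	}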
299 struct qib_sge *sge = &ss->sge; in qib_copy_from_sge() local
302 u32 len = sge->length; in qib_copy_from_sge()
306 if (len > sge->sge_length) in qib_copy_from_sge()
307 len = sge->sge_length; in qib_copy_from_sge()
309 memcpy(data, sge->vaddr, len); in qib_copy_from_sge()
310 sge->vaddr += len; in qib_copy_from_sge()
311 sge->length -= len; in qib_copy_from_sge()
312 sge->sge_length -= len; in qib_copy_from_sge()
313 if (sge->sge_length == 0) { in qib_copy_from_sge()
315 *sge = *ss->sg_list++; in qib_copy_from_sge()
316 } else if (sge->length == 0 && sge->mr->lkey) { in qib_copy_from_sge()
317 if (++sge->n >= QIB_SEGSZ) { in qib_copy_from_sge()
318 if (++sge->m >= sge->mr->mapsz) in qib_copy_from_sge()
320 sge->n = 0; in qib_copy_from_sge()
322 sge->vaddr = in qib_copy_from_sge()
323 sge->mr->map[sge->m]->segs[sge->n].vaddr; in qib_copy_from_sge()
324 sge->length = in qib_copy_from_sge()
325 sge->mr->map[sge->m]->segs[sge->n].length; in qib_copy_from_sge()
438 struct qib_sge *sge = &wqe->sg_list[--j]; in qib_post_one_send() local
440 qib_put_mr(sge->mr); in qib_post_one_send()
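Lines 438 and 440 are the error-unwind path of qib_post_one_send(): if validating or translating the work request's SGEs fails partway through, the MR references already taken on the earlier entries are dropped again. A minimal sketch, assuming j counts the wqe->sg_list[] entries that were successfully set up before the failure:

	while (j) {
		struct qib_sge *sge = &wqe->sg_list[--j];

		qib_put_mr(sge->mr);	/* undo the reference taken during setup */
	}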
737 struct qib_sge *sge = &ss->sge; in update_sge() local
739 sge->vaddr += length; in update_sge()
740 sge->length -= length; in update_sge()
741 sge->sge_length -= length; in update_sge()
742 if (sge->sge_length == 0) { in update_sge()
744 *sge = *ss->sg_list++; in update_sge()
745 } else if (sge->length == 0 && sge->mr->lkey) { in update_sge()
746 if (++sge->n >= QIB_SEGSZ) { in update_sge()
747 if (++sge->m >= sge->mr->mapsz) in update_sge()
749 sge->n = 0; in update_sge()
751 sge->vaddr = sge->mr->map[sge->m]->segs[sge->n].vaddr; in update_sge()
752 sge->length = sge->mr->map[sge->m]->segs[sge->n].length; in update_sge()
800 u32 len = ss->sge.length; in copy_io()
805 if (len > ss->sge.sge_length) in copy_io()
806 len = ss->sge.sge_length; in copy_io()
809 off = (unsigned long)ss->sge.vaddr & (sizeof(u32) - 1); in copy_io()
811 u32 *addr = (u32 *)((unsigned long)ss->sge.vaddr & in copy_io()
842 u32 *addr = (u32 *) ss->sge.vaddr; in copy_io()
895 qib_pio_copy(piobuf, ss->sge.vaddr, w - 1); in copy_io()
897 last = ((u32 *) ss->sge.vaddr)[w - 1]; in copy_io()
902 qib_pio_copy(piobuf, ss->sge.vaddr, w); in copy_io()
907 u32 v = ((u32 *) ss->sge.vaddr)[w]; in copy_io()
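copy_io() feeds the payload into the chip's PIO send buffer a dword at a time, coping with source chunks that are misaligned (lines 809-811) or not a multiple of four bytes. The matches at lines 895-907 are the aligned case; a hedged reconstruction of just that branch is below. The len == length test, the piobuf advance and the locals last, extra and data do not appear in the matches and are assumptions, and the driver's endian-aware byte-merging helpers are reduced to a comment:

	/* Assumed context: inside copy_io()'s per-chunk loop, source dword-aligned. */
	u32 w;

	if (len == length) {
		/* Last chunk of the packet: copy all but the final dword and hold
		 * that dword back so it can be written after everything else (the
		 * trigger word that completes the PIO send). */
		w = (len + 3) >> 2;
		qib_pio_copy(piobuf, ss->sge.vaddr, w - 1);
		piobuf += w - 1;
		last = ((u32 *) ss->sge.vaddr)[w - 1];
		break;
	}
	/* Middle chunk: copy the whole dwords ... */
	w = len >> 2;
	qib_pio_copy(piobuf, ss->sge.vaddr, w);
	piobuf += w;
	/* ... and carry any trailing bytes over to the next chunk. */
	extra = len & (sizeof(u32) - 1);
	if (extra) {
		u32 v = ((u32 *) ss->sge.vaddr)[w];

		/* the driver masks off the bytes beyond 'extra' here with an
		 * endian-aware helper before saving them in 'data' */
		data = v;
	}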
1325 if (likely(ss->num_sge == 1 && len <= ss->sge.length && in qib_verbs_send_pio()
1326 !((unsigned long)ss->sge.vaddr & (sizeof(u32) - 1)))) { in qib_verbs_send_pio()
1327 u32 *addr = (u32 *) ss->sge.vaddr; in qib_verbs_send_pio()
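The lines 1325-1327 match is the PIO send fast path: when the whole payload sits in a single, dword-aligned SGE, it can be block-copied straight into the PIO buffer and copy_io() is skipped. A sketch of that path follows; dwords, flush_wc and the copy_io() fallback come from the surrounding function and are assumptions here, not part of the matches:

	if (likely(ss->num_sge == 1 && len <= ss->sge.length &&
		   !((unsigned long)ss->sge.vaddr & (sizeof(u32) - 1)))) {
		u32 *addr = (u32 *) ss->sge.vaddr;

		update_sge(ss, len);		/* advance the SGE state before sending */
		if (flush_wc) {
			/* write-combining chips: flush, then write the trigger
			 * word last so the packet only launches when complete */
			qib_pio_copy(piobuf, addr, dwords - 1);
			qib_flush_wc();
			__raw_writel(addr[dwords - 1], piobuf + dwords - 1);
			qib_flush_wc();
		} else {
			qib_pio_copy(piobuf, addr, dwords);
		}
	} else {
		copy_io(piobuf, ss, len, flush_wc);	/* general case */
	}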