/linux-4.4.14/drivers/infiniband/core/ |
H A D | umem.c |
    54   ib_dma_unmap_sg(dev, umem->sg_head.sgl,                               __ib_umem_release()
    58   for_each_sg(umem->sg_head.sgl, sg, umem->npages, i) {                 __ib_umem_release()
    66   sg_free_table(&umem->sg_head);                                        __ib_umem_release()
    183  ret = sg_alloc_table(&umem->sg_head, npages, GFP_KERNEL);             ib_umem_get()
    188  sg_list_start = umem->sg_head.sgl;                                    ib_umem_get()
    215  umem->sg_head.sgl,
    326  for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i)                     ib_umem_page_count()
    355  ret = sg_pcopy_to_buffer(umem->sg_head.sgl, umem->nmap, dst, length,  ib_umem_copy_from()
|
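The umem.c matches above cover the whole life cycle of this scatter-gather table: ib_umem_get() allocates it with sg_alloc_table() (line 183), fills it starting at sg_head.sgl (188) and hands it to the DMA-mapping call (215); ib_umem_page_count() and ib_umem_copy_from() walk or copy from the mapped entries (326, 355); __ib_umem_release() unmaps it and frees it with sg_free_table() (54-66). A minimal consumer-side sketch of such a walk, assuming a umem obtained from ib_umem_get(); the helper name is hypothetical:

#include <linux/scatterlist.h>
#include <rdma/ib_umem.h>

/*
 * Hypothetical helper: sum the DMA-mapped length of a pinned user
 * memory region by walking the same umem->sg_head table that
 * ib_umem_get() builds and __ib_umem_release() tears down.
 */
static unsigned long my_umem_dma_len(struct ib_umem *umem)
{
        struct scatterlist *sg;
        unsigned long total = 0;
        int i;

        /* umem->nmap is the number of entries that were actually DMA-mapped */
        for_each_sg(umem->sg_head.sgl, sg, umem->nmap, i)
                total += sg_dma_len(sg);

        return total;
}
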
/linux-4.4.14/drivers/infiniband/hw/mlx4/ |
H A D | doorbell.c | 75 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); mlx4_ib_db_map_user()
|
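Both doorbell.c matches (this one and the mlx5 copy below) compute the bus address of a user-space doorbell record that sits inside a single pinned page: the DMA address of the first scatterlist entry plus the offset of the user virtual address within its page (virt & ~PAGE_MASK). A sketch of just that calculation; the function name is hypothetical:

#include <linux/mm.h>               /* PAGE_MASK */
#include <linux/scatterlist.h>
#include <rdma/ib_umem.h>

/*
 * Hypothetical sketch of the doorbell address math: the record fits in
 * one pinned page, so its bus address is the DMA address of the first
 * SG entry plus the in-page offset of the user virtual address.
 */
static dma_addr_t my_db_dma_address(struct ib_umem *umem, unsigned long virt)
{
        return sg_dma_address(umem->sg_head.sgl) + (virt & ~PAGE_MASK);
}
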
H A D | mr.c | 105 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { mlx4_ib_umem_write_mtt()
|
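mlx4_ib_umem_write_mtt() above is the first of many instances of the same idiom; the mlx5, ipath, hfi1, qib, amso1100, cxgb4, mthca, cxgb3, nes and ocrdma entries below all walk umem->sg_head with for_each_sg() over umem->nmap mapped entries and emit one bus address per page into a device-specific translation structure (MTT, PAS, PBL, ...). A generic sketch of that walk, assuming PAGE_SIZE-sized umem pages (the real drivers honor the umem page size); the helper name is hypothetical:

#include <linux/errno.h>
#include <linux/mm.h>               /* PAGE_SHIFT */
#include <linux/scatterlist.h>
#include <rdma/ib_umem.h>

/*
 * Hypothetical sketch of the per-driver translation-table fill: cut
 * every DMA-mapped SG entry into page-sized pieces and record each
 * piece's bus address (here simply stored into 'pages').
 */
static int my_fill_page_list(struct ib_umem *umem, u64 *pages, int max_pages)
{
        struct scatterlist *sg;
        int entry, n = 0;

        for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {
                unsigned int pages_in_entry = sg_dma_len(sg) >> PAGE_SHIFT;
                unsigned int k;

                for (k = 0; k < pages_in_entry; k++) {
                        if (n >= max_pages)
                                return -ENOMEM;
                        pages[n++] = sg_dma_address(sg) +
                                     ((u64)k << PAGE_SHIFT);
                }
        }

        return n;          /* number of page addresses written */
}
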
/linux-4.4.14/drivers/infiniband/hw/mlx5/ |
H A D | doorbell.c | 77 db->dma = sg_dma_address(page->umem->sg_head.sgl) + (virt & ~PAGE_MASK); mlx5_ib_db_map_user()
|
H A D | mem.c |
    78   for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {   mlx5_ib_cont_pages()
    180  for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) {   __mlx5_ib_populate_pas()
|
/linux-4.4.14/include/rdma/ |
H A D | ib_umem.h | 55 struct sg_table sg_head; member in struct:ib_umem
|
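ib_umem.h is where the member itself lives: struct ib_umem embeds a struct sg_table named sg_head, next to the npages/nmap counters used by the loops in the other matches. A stripped-down, illustrative view of just those fields (the real struct carries more members, so the reduced type below is deliberately renamed):

#include <linux/scatterlist.h>

/*
 * Illustrative, reduced view of the fields exercised by the matches in
 * this listing; the real struct ib_umem in <rdma/ib_umem.h> has
 * additional members, so this stand-in is deliberately renamed.
 */
struct my_umem_view {
        struct sg_table sg_head;    /* pages of the pinned user region */
        int             npages;     /* total pages pinned              */
        int             nmap;       /* SG entries actually DMA-mapped  */
};
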
/linux-4.4.14/drivers/crypto/nx/ |
H A D | nx.c |
    80   * @sg_head: pointer to the first scatter list element to build
    85   * This function will start writing nx_sg elements at @sg_head and keep
    91   struct nx_sg *nx_build_sg_list(struct nx_sg *sg_head,   nx_build_sg_list() argument
    121  for (sg = sg_head; sg_len < *len; sg++) {                nx_build_sg_list()
    139  if ((sg - sg_head) == sgmax) {                           nx_build_sg_list()
    148  /* return the moved sg_head pointer */                   nx_build_sg_list()
|
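In the NX crypto driver, sg_head is not the RDMA member at all but the head of a driver-private nx_sg array: nx_build_sg_list() starts writing elements at @sg_head, stops when the sgmax limit is reached (line 139), and returns the advanced pointer (line 148) so the caller can keep appending. A simplified sketch of that build-and-return-the-cursor pattern, using a hypothetical element type and omitting the NX-specific address translation and length bookkeeping:

#include <linux/kernel.h>           /* min_t */
#include <linux/mm.h>               /* PAGE_SIZE */
#include <linux/types.h>

/* Hypothetical, simplified element; the real driver uses struct nx_sg. */
struct my_sg {
        u64 addr;
        u32 len;
};

/*
 * Sketch of the nx_build_sg_list() pattern: fill elements starting at
 * sg_head, never write more than sgmax entries, and return the first
 * unused slot so the caller can keep appending to the same list.
 */
static struct my_sg *my_build_sg_list(struct my_sg *sg_head, u8 *data,
                                      unsigned int len, u32 sgmax)
{
        struct my_sg *sg = sg_head;

        while (len && (u32)(sg - sg_head) < sgmax) {
                u32 chunk = min_t(u32, len, PAGE_SIZE);

                sg->addr = (u64)(uintptr_t)data;   /* simplified: virtual addr */
                sg->len = chunk;
                data += chunk;
                len -= chunk;
                sg++;
        }

        /* return the moved sg_head pointer, as the real function does */
        return sg;
}
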
/linux-4.4.14/drivers/scsi/ |
H A D | advansys.c |
    330   ASC_SG_HEAD *sg_head;                                            member in struct:asc_scsi_q
    339   ASC_SG_HEAD *sg_head;                                            member in struct:asc_scsi_bios_req_q
    2518  printk(" cdbptr 0x%lx, cdb_len %u, sg_head 0x%lx, sg_queue_cnt %u\n",   asc_prt_asc_scsi_q()
    2520  (ulong)q->sg_head, q->q1.sg_queue_cnt);                          asc_prt_asc_scsi_q()
    2522  if (q->sg_head) {                                                asc_prt_asc_scsi_q()
    2523  sgp = q->sg_head;                                                asc_prt_asc_scsi_q()
    7582  asc_sg_head = kzalloc(sizeof(asc_scsi_q->sg_head) +              asc_build_req()
    7591  asc_scsi_q->sg_head = asc_sg_head;                               asc_build_req()
    8011  ASC_SG_HEAD *sg_head;                                            AscPutReadySgListQueue() local
    8023  sg_head = scsiq->sg_head;                                        AscPutReadySgListQueue()
    8026  scsiq->q1.data_addr = cpu_to_le32(sg_head->sg_list[0].addr);     AscPutReadySgListQueue()
    8027  scsiq->q1.data_cnt = cpu_to_le32(sg_head->sg_list[0].bytes);     AscPutReadySgListQueue()
    8033  sg_entry_cnt = sg_head->entry_cnt - 1;                           AscPutReadySgListQueue()
    8039  scsiq->q1.sg_queue_cnt = sg_head->queue_cnt;                     AscPutReadySgListQueue()
    8042  for (i = 0; i < sg_head->queue_cnt; i++) {                       AscPutReadySgListQueue()
    8084  (uchar *)&sg_head->                                              AscPutReadySgListQueue()
    8119  scsiq->sg_head->queue_cnt = n_q_required - 1;                    AscSendScsiQueue()
    8175  ASC_SG_HEAD *sg_head;                                            AscExeScsiQueue() local
    8179  sg_head = scsiq->sg_head;                                        AscExeScsiQueue()
    8211  if ((sg_entry_cnt = sg_head->entry_cnt) == 0) {                  AscExeScsiQueue()
    8220  scsiq->q1.data_addr = cpu_to_le32(sg_head->sg_list[0].addr);     AscExeScsiQueue()
    8221  scsiq->q1.data_cnt = cpu_to_le32(sg_head->sg_list[0].bytes);     AscExeScsiQueue()
    8233  data_cnt += le32_to_cpu(sg_head->sg_list[i].                     AscExeScsiQueue()
    8271  addr = le32_to_cpu(sg_head->                                     AscExeScsiQueue()
    8275  le32_to_cpu(sg_head->                                            AscExeScsiQueue()
    8292  le32_to_cpu(sg_head->                                            AscExeScsiQueue()
    8297  sg_head->                                                        AscExeScsiQueue()
    8306  sg_head->entry_to_copy = sg_head->entry_cnt;                     AscExeScsiQueue()
    8490  kfree(asc_scsi_q.sg_head);                                       asc_execute_scsi_cmnd()
|
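In advansys, sg_head is again driver-private: an ASC_SG_HEAD whose sg_list[] array describes the transfer, and whose first entry seeds the queue header's data_addr/data_cnt fields (lines 8026-8027 and 8220-8221 above). A sketch of that seeding step, with reduced, hypothetical structures standing in for ASC_SG_HEAD and the queue header:

#include <linux/types.h>
#include <asm/byteorder.h>          /* cpu_to_le32 */

/* Reduced, hypothetical stand-ins for the driver's ASC_SG_HEAD and queue. */
struct my_asc_sg_entry {
        u32 addr;
        u32 bytes;
};

struct my_asc_sg_head {
        u16 entry_cnt;
        struct my_asc_sg_entry sg_list[16];
};

struct my_asc_scsi_q {
        __le32 data_addr;
        __le32 data_cnt;
        struct my_asc_sg_head *sg_head;
};

/*
 * Sketch of the seeding done around lines 8026-8027 / 8220-8221: the
 * queue header's single data_addr/data_cnt pair is primed from the
 * first scatter-gather entry before the rest of the list is copied out.
 */
static void my_seed_from_first_sg(struct my_asc_scsi_q *q)
{
        struct my_asc_sg_head *sg_head = q->sg_head;

        q->data_addr = cpu_to_le32(sg_head->sg_list[0].addr);
        q->data_cnt  = cpu_to_le32(sg_head->sg_list[0].bytes);
}
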
/linux-4.4.14/drivers/staging/rdma/ipath/ |
H A D | ipath_mr.c | 224 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { ipath_reg_user_mr()
|
/linux-4.4.14/drivers/staging/rdma/hfi1/ |
H A D | mr.c | 286 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { hfi1_reg_user_mr()
|
/linux-4.4.14/drivers/infiniband/hw/qib/ |
H A D | qib_mr.c | 269 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { qib_reg_user_mr()
|
/linux-4.4.14/drivers/staging/rdma/amso1100/ |
H A D | c2_provider.c | 472 for_each_sg(c2mr->umem->sg_head.sgl, sg, c2mr->umem->nmap, entry) { c2_reg_user_mr()
|
/linux-4.4.14/drivers/infiniband/hw/cxgb4/ |
H A D | mem.c | 754 for_each_sg(mhp->umem->sg_head.sgl, sg, mhp->umem->nmap, entry) { c4iw_reg_user_mr()
|
/linux-4.4.14/drivers/infiniband/hw/mthca/ |
H A D | mthca_provider.c | 1039 for_each_sg(mr->umem->sg_head.sgl, sg, mr->umem->nmap, entry) { mthca_reg_user_mr()
|
/linux-4.4.14/drivers/infiniband/hw/cxgb3/ |
H A D | iwch_provider.c | 671 for_each_sg(mhp->umem->sg_head.sgl, sg, mhp->umem->nmap, entry) { iwch_reg_user_mr()
|
/linux-4.4.14/drivers/staging/rdma/ehca/ |
H A D | ehca_mrmw.c | 403 pginfo.u.usr.next_sg = pginfo.u.usr.region->sg_head.sgl; ehca_reg_user_mr()
|
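The ehca match differs from the plain for_each_sg() loops: it stores a cursor (next_sg) initialized to sg_head.sgl so later code can resume the walk incrementally rather than in one pass. A small sketch of that resumable-cursor idea; the structure and helpers are hypothetical:

#include <linux/scatterlist.h>
#include <rdma/ib_umem.h>

/* Hypothetical cursor over a umem SG table, resumable across calls. */
struct my_sg_cursor {
        struct scatterlist *next_sg;
        int remaining;
};

static void my_cursor_init(struct my_sg_cursor *cur, struct ib_umem *umem)
{
        cur->next_sg = umem->sg_head.sgl;   /* start at the head, as ehca does */
        cur->remaining = umem->nmap;
}

/* Hand out the DMA address of the next mapped entry; 0 signals exhaustion. */
static dma_addr_t my_cursor_next(struct my_sg_cursor *cur)
{
        dma_addr_t addr;

        if (!cur->remaining || !cur->next_sg)
                return 0;

        addr = sg_dma_address(cur->next_sg);
        cur->next_sg = sg_next(cur->next_sg);
        cur->remaining--;
        return addr;
}
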
/linux-4.4.14/drivers/infiniband/hw/nes/ |
H A D | nes_verbs.c |
    2370  for_each_sg(region->sg_head.sgl, sg, region->nmap, entry) {   nes_reg_user_mr()
    2600  for_each_sg(region->sg_head.sgl, sg, region->nmap, entry) {   nes_reg_user_mr()
|
/linux-4.4.14/drivers/infiniband/hw/ocrdma/ |
H A D | ocrdma_verbs.c | 918 for_each_sg(umem->sg_head.sgl, sg, umem->nmap, entry) { build_user_pbes()
|