Lines Matching refs:ib_dev

97 isert_query_device(struct ib_device *ib_dev, struct ib_device_attr *devattr)  in isert_query_device()  argument
101 ret = ib_query_device(ib_dev, devattr); in isert_query_device()
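
The isert_query_device() hits above use ib_dev only to fetch device attributes. A minimal sketch of that call for kernels of this era, where ib_query_device() still exists (newer kernels read the cached ib_device->attrs instead); the printed fields and error handling are illustrative, not taken from the driver:

#include <rdma/ib_verbs.h>

/* Sketch: query device attributes via the old ib_query_device() API.
 * Error handling and the pr_info() are illustrative only.
 */
static int example_query_device(struct ib_device *ib_dev,
                                struct ib_device_attr *devattr)
{
        int ret;

        memset(devattr, 0, sizeof(*devattr));
        ret = ib_query_device(ib_dev, devattr);
        if (ret)
                return ret;

        pr_info("max_sge=%d max_mr=%d\n", devattr->max_sge, devattr->max_mr);
        return 0;
}
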
212 struct ib_device *ib_dev = device->ib_device; in isert_alloc_rx_descriptors() local
226 dma_addr = ib_dma_map_single(ib_dev, (void *)rx_desc, in isert_alloc_rx_descriptors()
228 if (ib_dma_mapping_error(ib_dev, dma_addr)) in isert_alloc_rx_descriptors()
246 ib_dma_unmap_single(ib_dev, rx_desc->dma_addr, in isert_alloc_rx_descriptors()
260 struct ib_device *ib_dev = isert_conn->device->ib_device; in isert_free_rx_descriptors() local
269 ib_dma_unmap_single(ib_dev, rx_desc->dma_addr, in isert_free_rx_descriptors()
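
The isert_alloc_rx_descriptors()/isert_free_rx_descriptors() references show the basic map / check-for-error / unmap lifecycle of a receive buffer. A minimal sketch of that pattern, with a placeholder BUF_SIZE and a plain buffer standing in for the driver's struct iser_rx_desc of ISER_RX_PAYLOAD_SIZE:

#include <rdma/ib_verbs.h>

#define BUF_SIZE 4096   /* placeholder, not the driver's constant */

/* Map a receive buffer and check the mapping before using it. */
static int example_map_rx_buf(struct ib_device *ib_dev, void *buf, u64 *dma)
{
        *dma = ib_dma_map_single(ib_dev, buf, BUF_SIZE, DMA_FROM_DEVICE);
        if (ib_dma_mapping_error(ib_dev, *dma))
                return -ENOMEM;
        return 0;
}

/* Undo the mapping with the same size and direction. */
static void example_unmap_rx_buf(struct ib_device *ib_dev, u64 dma)
{
        ib_dma_unmap_single(ib_dev, dma, BUF_SIZE, DMA_FROM_DEVICE);
}
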
659 struct ib_device *ib_dev = isert_conn->device->ib_device; in isert_free_login_buf() local
661 ib_dma_unmap_single(ib_dev, isert_conn->login_rsp_dma, in isert_free_login_buf()
663 ib_dma_unmap_single(ib_dev, isert_conn->login_req_dma, in isert_free_login_buf()
671 struct ib_device *ib_dev) in isert_alloc_login_buf() argument
690 isert_conn->login_req_dma = ib_dma_map_single(ib_dev, in isert_alloc_login_buf()
694 ret = ib_dma_mapping_error(ib_dev, isert_conn->login_req_dma); in isert_alloc_login_buf()
701 isert_conn->login_rsp_dma = ib_dma_map_single(ib_dev, in isert_alloc_login_buf()
705 ret = ib_dma_mapping_error(ib_dev, isert_conn->login_rsp_dma); in isert_alloc_login_buf()
715 ib_dma_unmap_single(ib_dev, isert_conn->login_req_dma, in isert_alloc_login_buf()
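
isert_alloc_login_buf() maps two related buffers and unwinds when the second mapping fails, which is why the error-path entry above unmaps login_req_dma. A hedged sketch of that unwind ordering; the names and the shared length are placeholders, while the directions match the driver's (request buffer received from the wire, response buffer sent to it):

#include <rdma/ib_verbs.h>

/* Map a request/response buffer pair; on failure of the second mapping,
 * unmap the first before returning. Sizes and names are placeholders.
 */
static int example_map_login_bufs(struct ib_device *ib_dev,
                                  void *req, void *rsp, size_t len,
                                  u64 *req_dma, u64 *rsp_dma)
{
        *req_dma = ib_dma_map_single(ib_dev, req, len, DMA_FROM_DEVICE);
        if (ib_dma_mapping_error(ib_dev, *req_dma))
                return -ENOMEM;

        *rsp_dma = ib_dma_map_single(ib_dev, rsp, len, DMA_TO_DEVICE);
        if (ib_dma_mapping_error(ib_dev, *rsp_dma)) {
                ib_dma_unmap_single(ib_dev, *req_dma, len, DMA_FROM_DEVICE);
                return -ENOMEM;
        }
        return 0;
}
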
1044 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_post_send() local
1048 ib_dma_sync_single_for_device(ib_dev, tx_desc->dma_addr, in isert_post_send()
1071 struct ib_device *ib_dev = device->ib_device; in isert_create_send_desc() local
1073 ib_dma_sync_single_for_cpu(ib_dev, tx_desc->dma_addr, in isert_create_send_desc()
1093 struct ib_device *ib_dev = device->ib_device; in isert_init_tx_hdrs() local
1096 dma_addr = ib_dma_map_single(ib_dev, (void *)tx_desc, in isert_init_tx_hdrs()
1098 if (ib_dma_mapping_error(ib_dev, dma_addr)) { in isert_init_tx_hdrs()
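
Taken together, the isert_init_tx_hdrs(), isert_create_send_desc() and isert_post_send() references describe the TX descriptor lifecycle: mapped once up front, synced to the CPU before the headers are rewritten, synced back to the device before the send is posted. A sketch of that lifecycle with a placeholder descriptor type (the driver uses struct iser_tx_desc and syncs ISER_HEADERS_LEN):

#include <rdma/ib_verbs.h>

struct example_tx_desc {
        char    hdr[64];        /* placeholder header area */
        u64     dma_addr;
};

/* Map the descriptor once, as isert_init_tx_hdrs() does. */
static int example_init_tx(struct ib_device *ib_dev, struct example_tx_desc *tx)
{
        tx->dma_addr = ib_dma_map_single(ib_dev, tx, sizeof(*tx),
                                         DMA_TO_DEVICE);
        return ib_dma_mapping_error(ib_dev, tx->dma_addr) ? -ENOMEM : 0;
}

/* Make it CPU-visible before rewriting the headers. */
static void example_prepare_tx(struct ib_device *ib_dev,
                               struct example_tx_desc *tx)
{
        ib_dma_sync_single_for_cpu(ib_dev, tx->dma_addr, sizeof(*tx),
                                   DMA_TO_DEVICE);
        memset(tx->hdr, 0, sizeof(tx->hdr));    /* fill headers on the CPU */
}

/* Hand ownership back to the device before ib_post_send(). */
static void example_finish_tx(struct ib_device *ib_dev,
                              struct example_tx_desc *tx)
{
        ib_dma_sync_single_for_device(ib_dev, tx->dma_addr, sizeof(*tx),
                                      DMA_TO_DEVICE);
}
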
1165 struct ib_device *ib_dev = device->ib_device; in isert_put_login_tx() local
1179 ib_dma_sync_single_for_cpu(ib_dev, isert_conn->login_rsp_dma, in isert_put_login_tx()
1184 ib_dma_sync_single_for_device(ib_dev, isert_conn->login_rsp_dma, in isert_put_login_tx()
1589 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_rcv_completion() local
1606 ib_dma_sync_single_for_cpu(ib_dev, rx_dma, rx_buflen, DMA_FROM_DEVICE); in isert_rcv_completion()
1628 ib_dma_sync_single_for_device(ib_dev, rx_dma, rx_buflen, in isert_rcv_completion()
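
isert_rcv_completion() brackets header parsing with the two sync calls: the buffer is made CPU-visible, inspected, then handed back to the device before the descriptor is reposted. A minimal sketch with the parsing step stubbed out:

#include <rdma/ib_verbs.h>

/* Sync a completed receive buffer for the CPU, inspect it, then sync it
 * back for the device so the descriptor can be reposted.
 */
static void example_rcv_completion(struct ib_device *ib_dev,
                                   u64 rx_dma, size_t rx_buflen, void *rx_buf)
{
        ib_dma_sync_single_for_cpu(ib_dev, rx_dma, rx_buflen, DMA_FROM_DEVICE);

        /* parse the iSCSI/iSER headers in rx_buf here */

        ib_dma_sync_single_for_device(ib_dev, rx_dma, rx_buflen,
                                      DMA_FROM_DEVICE);
}
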
1655 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_map_data_buf() local
1670 data->dma_nents = ib_dma_map_sg(ib_dev, data->sg, data->nents, in isert_map_data_buf()
1686 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_unmap_data_buf() local
1688 ib_dma_unmap_sg(ib_dev, data->sg, data->nents, data->dma_dir); in isert_unmap_data_buf()
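
isert_map_data_buf()/isert_unmap_data_buf() use the scatter-gather variants. A sketch of that pair; note that the count returned by ib_dma_map_sg() can be smaller than nents (and 0 signals failure), while the unmap must still use the original nents, as the driver does with data->nents:

#include <rdma/ib_verbs.h>
#include <linux/scatterlist.h>

/* Map an SG list; return the mapped entry count or a negative error. */
static int example_map_sg(struct ib_device *ib_dev, struct scatterlist *sg,
                          int nents, enum dma_data_direction dir)
{
        int dma_nents = ib_dma_map_sg(ib_dev, sg, nents, dir);

        if (unlikely(!dma_nents))
                return -EINVAL;
        return dma_nents;
}

/* Unmap with the original nents, not the returned dma_nents. */
static void example_unmap_sg(struct ib_device *ib_dev, struct scatterlist *sg,
                             int nents, enum dma_data_direction dir)
{
        ib_dma_unmap_sg(ib_dev, sg, nents, dir);
}
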
1826 isert_unmap_tx_desc(struct iser_tx_desc *tx_desc, struct ib_device *ib_dev) in isert_unmap_tx_desc() argument
1830 ib_dma_unmap_single(ib_dev, tx_desc->dma_addr, in isert_unmap_tx_desc()
1838 struct ib_device *ib_dev, bool comp_err) in isert_completion_put() argument
1842 ib_dma_unmap_single(ib_dev, isert_cmd->pdu_buf_dma, in isert_completion_put()
1847 isert_unmap_tx_desc(tx_desc, ib_dev); in isert_completion_put()
1964 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_do_control_comp() local
1976 ib_dev, false); in isert_do_control_comp()
1992 struct ib_device *ib_dev) in isert_response_completion() argument
2000 isert_unmap_tx_desc(tx_desc, ib_dev); in isert_response_completion()
2008 isert_completion_put(tx_desc, isert_cmd, ib_dev, false); in isert_response_completion()
2015 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_snd_completion() local
2020 isert_unmap_tx_desc(tx_desc, ib_dev); in isert_snd_completion()
2030 isert_conn, ib_dev); in isert_snd_completion()
2077 struct ib_device *ib_dev = isert_conn->cm_id->device; in isert_cq_comp_err() local
2084 isert_unmap_tx_desc(desc, ib_dev); in isert_cq_comp_err()
2086 isert_completion_put(desc, isert_cmd, ib_dev, true); in isert_cq_comp_err()
2186 struct ib_device *ib_dev = device->ib_device; in isert_put_response() local
2198 isert_cmd->pdu_buf_dma = ib_dma_map_single(ib_dev, in isert_put_response()
2317 struct ib_device *ib_dev = device->ib_device; in isert_put_reject() local
2327 isert_cmd->pdu_buf_dma = ib_dma_map_single(ib_dev, in isert_put_reject()
2364 struct ib_device *ib_dev = device->ib_device; in isert_put_text_rsp() local
2368 isert_cmd->pdu_buf_dma = ib_dma_map_single(ib_dev, in isert_put_text_rsp()
2392 struct ib_device *ib_dev = device->ib_device; in isert_build_rdma_wr() local
2412 ib_sge->addr = ib_sg_dma_address(ib_dev, tmp_sg) + page_off; in isert_build_rdma_wr()
2414 ib_sg_dma_len(ib_dev, tmp_sg) - page_off); in isert_build_rdma_wr()
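
isert_build_rdma_wr() converts mapped SG entries into ib_sge elements, applying the page offset only to the first entry. A simplified sketch of that conversion; the driver additionally clamps each length against the remaining transfer size, which is omitted here, and ib_sg_dma_address()/ib_sg_dma_len() are the accessors of this kernel era (later kernels use sg_dma_address()/sg_dma_len() directly):

#include <rdma/ib_verbs.h>
#include <linux/scatterlist.h>

/* Fill an ib_sge array from a mapped SG list; only the first element
 * carries the page offset. Returns the number of SGEs filled.
 */
static int example_fill_sges(struct ib_device *ib_dev, struct scatterlist *sg,
                             int dma_nents, u32 page_off,
                             struct ib_sge *sges, u32 lkey)
{
        struct scatterlist *tmp_sg;
        int i;

        for_each_sg(sg, tmp_sg, dma_nents, i) {
                sges[i].addr   = ib_sg_dma_address(ib_dev, tmp_sg) + page_off;
                sges[i].length = ib_sg_dma_len(ib_dev, tmp_sg) - page_off;
                sges[i].lkey   = lkey;
                page_off = 0;           /* offset applies to the first entry only */
        }
        return i;
}
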
2519 isert_map_fr_pagelist(struct ib_device *ib_dev, in isert_map_fr_pagelist() argument
2530 start_addr = ib_sg_dma_address(ib_dev, tmp_sg); in isert_map_fr_pagelist()
2533 end_addr = start_addr + ib_sg_dma_len(ib_dev, tmp_sg); in isert_map_fr_pagelist()
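
isert_map_fr_pagelist() walks the mapped SG list and emits page-aligned DMA addresses for fast registration. A simplified sketch of that idea; the driver additionally merges physically contiguous entries into chunks, which this version skips:

#include <rdma/ib_verbs.h>
#include <linux/scatterlist.h>

/* Emit one page-aligned DMA address per page touched by the SG list.
 * pagelist must be large enough for the worst case; merging of
 * contiguous entries (as the driver does) is omitted.
 */
static int example_map_pagelist(struct ib_device *ib_dev,
                                struct scatterlist *sg_start, int sg_nents,
                                u64 *pagelist)
{
        struct scatterlist *tmp_sg;
        int i, n = 0;

        for_each_sg(sg_start, tmp_sg, sg_nents, i) {
                u64 addr = ib_sg_dma_address(ib_dev, tmp_sg) & PAGE_MASK;
                u64 end  = ib_sg_dma_address(ib_dev, tmp_sg) +
                           ib_sg_dma_len(ib_dev, tmp_sg);

                for (; addr < end; addr += PAGE_SIZE)
                        pagelist[n++] = addr;
        }
        return n;
}
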
2580 struct ib_device *ib_dev = device->ib_device; in isert_fast_reg_mr() local
2590 sge->addr = ib_sg_dma_address(ib_dev, &mem->sg[0]); in isert_fast_reg_mr()
2591 sge->length = ib_sg_dma_len(ib_dev, &mem->sg[0]); in isert_fast_reg_mr()
2612 pagelist_len = isert_map_fr_pagelist(ib_dev, mem->sg, mem->nents, in isert_fast_reg_mr()
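
isert_fast_reg_mr() first checks for the trivial case: a single mapped SG entry needs no registration at all and can be described by one ib_sge. A sketch of that shortcut; local_dma_lkey stands in for the driver's DMA MR lkey, and the multi-entry path (page list plus fast-registration work request) is omitted:

#include <rdma/ib_verbs.h>
#include <linux/scatterlist.h>

/* If only one SG entry was mapped, describe it with a plain ib_sge and
 * skip fast registration entirely. Returns true when the shortcut applies.
 */
static bool example_try_single_sge(struct ib_device *ib_dev,
                                   struct scatterlist *sg, int dma_nents,
                                   u32 local_dma_lkey, struct ib_sge *sge)
{
        if (dma_nents != 1)
                return false;

        sge->addr   = ib_sg_dma_address(ib_dev, &sg[0]);
        sge->length = ib_sg_dma_len(ib_dev, &sg[0]);
        sge->lkey   = local_dma_lkey;
        return true;
}
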