Lines matching refs: op (each hit shows the source line number, the code, and the enclosing function in the RDS InfiniBand send path)
72 struct rm_data_op *op, in rds_ib_send_unmap_data() argument
75 if (op->op_nents) in rds_ib_send_unmap_data()
77 op->op_sg, op->op_nents, in rds_ib_send_unmap_data()
82 struct rm_rdma_op *op, in rds_ib_send_unmap_rdma() argument
85 if (op->op_mapped) { in rds_ib_send_unmap_rdma()
87 op->op_sg, op->op_nents, in rds_ib_send_unmap_rdma()
88 op->op_write ? DMA_TO_DEVICE : DMA_FROM_DEVICE); in rds_ib_send_unmap_rdma()
89 op->op_mapped = 0; in rds_ib_send_unmap_rdma()
112 rds_ib_send_complete(container_of(op, struct rds_message, rdma), in rds_ib_send_unmap_rdma()
115 if (op->op_write) in rds_ib_send_unmap_rdma()
116 rds_stats_add(s_send_rdma_bytes, op->op_bytes); in rds_ib_send_unmap_rdma()
118 rds_stats_add(s_recv_rdma_bytes, op->op_bytes); in rds_ib_send_unmap_rdma()
122 struct rm_atomic_op *op, in rds_ib_send_unmap_atomic() argument
126 if (op->op_mapped) { in rds_ib_send_unmap_atomic()
127 ib_dma_unmap_sg(ic->i_cm_id->device, op->op_sg, 1, in rds_ib_send_unmap_atomic()
129 op->op_mapped = 0; in rds_ib_send_unmap_atomic()
132 rds_ib_send_complete(container_of(op, struct rds_message, atomic), in rds_ib_send_unmap_atomic()
135 if (op->op_type == RDS_ATOMIC_TYPE_CSWP) in rds_ib_send_unmap_atomic()
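The three unmap helpers above (rds_ib_send_unmap_data, rds_ib_send_unmap_rdma, rds_ib_send_unmap_atomic) share one pattern: unmap the scatterlist only while op_mapped is still set, pick the DMA direction from op_write, clear the flag so the unmap cannot run twice, then complete the owning rds_message recovered with container_of(). The following is a minimal user-space sketch of that control flow, not the kernel code itself: the struct layout is reduced, and fake_unmap_sg() is a hypothetical stand-in for ib_dma_unmap_sg(ic->i_cm_id->device, ...).

    #include <stdio.h>
    #include <stddef.h>

    /* Mock stand-ins for the kernel types and IB verbs call; only the
     * control flow of the unmap helpers is modeled here. */
    enum dma_data_direction { DMA_TO_DEVICE, DMA_FROM_DEVICE };

    struct rm_rdma_op {
            unsigned int op_mapped;   /* set once op_sg has been DMA-mapped */
            unsigned int op_write;    /* RDMA WRITE pushes local data out */
            unsigned int op_nents;    /* number of scatterlist entries */
    };

    struct rds_message {
            struct rm_rdma_op rdma;   /* container_of() recovers this in the kernel */
    };

    /* Placeholder for ib_dma_unmap_sg(ic->i_cm_id->device, sg, nents, dir). */
    static void fake_unmap_sg(unsigned int nents, enum dma_data_direction dir)
    {
            printf("unmapping %u entries, dir=%s\n", nents,
                   dir == DMA_TO_DEVICE ? "DMA_TO_DEVICE" : "DMA_FROM_DEVICE");
    }

    static void unmap_rdma_like(struct rm_rdma_op *op)
    {
            if (op->op_mapped) {
                    /* A WRITE sends local buffers to the wire, a READ fills them. */
                    fake_unmap_sg(op->op_nents,
                                  op->op_write ? DMA_TO_DEVICE : DMA_FROM_DEVICE);
                    op->op_mapped = 0;   /* never unmap the same op twice */
            }
            /* The kernel then completes the owning message:
             * rds_ib_send_complete(container_of(op, struct rds_message, rdma), ...);
             * and bumps s_send_rdma_bytes or s_recv_rdma_bytes based on op_write. */
    }

    int main(void)
    {
            struct rds_message rm = {
                    .rdma = { .op_mapped = 1, .op_write = 1, .op_nents = 4 }
            };
            unmap_rdma_like(&rm.rdma);
            return 0;
    }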
754 int rds_ib_xmit_atomic(struct rds_connection *conn, struct rm_atomic_op *op) in rds_ib_xmit_atomic() argument
779 if (op->op_type == RDS_ATOMIC_TYPE_CSWP) { in rds_ib_xmit_atomic()
781 send->s_atomic_wr.compare_add = op->op_m_cswp.compare; in rds_ib_xmit_atomic()
782 send->s_atomic_wr.swap = op->op_m_cswp.swap; in rds_ib_xmit_atomic()
783 send->s_atomic_wr.compare_add_mask = op->op_m_cswp.compare_mask; in rds_ib_xmit_atomic()
784 send->s_atomic_wr.swap_mask = op->op_m_cswp.swap_mask; in rds_ib_xmit_atomic()
787 send->s_atomic_wr.compare_add = op->op_m_fadd.add; in rds_ib_xmit_atomic()
789 send->s_atomic_wr.compare_add_mask = op->op_m_fadd.nocarry_mask; in rds_ib_xmit_atomic()
792 nr_sig = rds_ib_set_wr_signal_state(ic, send, op->op_notify); in rds_ib_xmit_atomic()
795 send->s_atomic_wr.remote_addr = op->op_remote_addr; in rds_ib_xmit_atomic()
796 send->s_atomic_wr.rkey = op->op_rkey; in rds_ib_xmit_atomic()
797 send->s_op = op; in rds_ib_xmit_atomic()
801 ret = ib_dma_map_sg(ic->i_cm_id->device, op->op_sg, 1, DMA_FROM_DEVICE); in rds_ib_xmit_atomic()
802 rdsdebug("ic %p mapping atomic op %p. mapped %d pg\n", ic, op, ret); in rds_ib_xmit_atomic()
811 send->s_sge[0].addr = ib_sg_dma_address(ic->i_cm_id->device, op->op_sg); in rds_ib_xmit_atomic()
812 send->s_sge[0].length = ib_sg_dma_len(ic->i_cm_id->device, op->op_sg); in rds_ib_xmit_atomic()
815 rdsdebug("rva %Lx rpa %Lx len %u\n", op->op_remote_addr, in rds_ib_xmit_atomic()
843 int rds_ib_xmit_rdma(struct rds_connection *conn, struct rm_rdma_op *op) in rds_ib_xmit_rdma() argument
852 u64 remote_addr = op->op_remote_addr; in rds_ib_xmit_rdma()
864 if (!op->op_mapped) { in rds_ib_xmit_rdma()
865 op->op_count = ib_dma_map_sg(ic->i_cm_id->device, in rds_ib_xmit_rdma()
866 op->op_sg, op->op_nents, (op->op_write) ? in rds_ib_xmit_rdma()
868 rdsdebug("ic %p mapping op %p: %d\n", ic, op, op->op_count); in rds_ib_xmit_rdma()
869 if (op->op_count == 0) { in rds_ib_xmit_rdma()
875 op->op_mapped = 1; in rds_ib_xmit_rdma()
882 i = ceil(op->op_count, max_sge); in rds_ib_xmit_rdma()
895 scat = &op->op_sg[0]; in rds_ib_xmit_rdma()
897 num_sge = op->op_count; in rds_ib_xmit_rdma()
899 for (i = 0; i < work_alloc && scat != &op->op_sg[op->op_count]; i++) { in rds_ib_xmit_rdma()
904 nr_sig += rds_ib_set_wr_signal_state(ic, send, op->op_notify); in rds_ib_xmit_rdma()
906 send->s_wr.opcode = op->op_write ? IB_WR_RDMA_WRITE : IB_WR_RDMA_READ; in rds_ib_xmit_rdma()
908 send->s_rdma_wr.rkey = op->op_rkey; in rds_ib_xmit_rdma()
923 scat != &op->op_sg[op->op_count]; j++) { in rds_ib_xmit_rdma()
948 if (scat == &op->op_sg[op->op_count]) { in rds_ib_xmit_rdma()
949 prev->s_op = op; in rds_ib_xmit_rdma()
950 rds_message_addref(container_of(op, struct rds_message, rdma)); in rds_ib_xmit_rdma()
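rds_ib_xmit_rdma() maps op_sg once, guarded by op_mapped, then splits the op_count mapped entries into work requests of at most max_sge scatter/gather entries each; the ceil() shown at 882 is RDS's round-up division helper. Only when the walk reaches &op->op_sg[op->op_count] does the final WR get s_op set and a reference taken on the owning rds_message, so the completion handler can later unmap and notify. Below is a small stand-alone sketch of just that chunking arithmetic; plan_rdma_wrs and div_round_up are hypothetical names, not kernel functions.

    #include <stdio.h>

    /* ceil(x, y) in the RDS code is round-up division, i.e. DIV_ROUND_UP(). */
    static unsigned int div_round_up(unsigned int x, unsigned int y)
    {
            return (x + y - 1) / y;
    }

    /*
     * Split op_count mapped scatterlist entries into RDMA work requests that
     * each carry at most max_sge scatter/gather entries, mirroring the loops
     * in rds_ib_xmit_rdma(): the outer loop builds one WR per chunk, the
     * inner loop (index j in the listing) attaches SGEs until either the
     * chunk or the scatterlist runs out.
     */
    static void plan_rdma_wrs(unsigned int op_count, unsigned int max_sge)
    {
            unsigned int work_alloc = div_round_up(op_count, max_sge); /* WRs needed */
            unsigned int remaining = op_count;

            for (unsigned int i = 0; i < work_alloc; i++) {
                    unsigned int sge_in_this_wr =
                            remaining > max_sge ? max_sge : remaining;
                    remaining -= sge_in_this_wr;

                    /* Only the final WR would get s_op and a message reference. */
                    printf("WR %u: %u SGEs%s\n", i, sge_in_this_wr,
                           remaining == 0 ? " (last: s_op set, message ref taken)" : "");
            }
    }

    int main(void)
    {
            plan_rdma_wrs(10, 4);  /* e.g. 10 mapped entries, 4 SGEs per WR -> 3 WRs */
            return 0;
    }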