n_rdma  925 drivers/infiniband/ulp/srpt/ib_srpt.c  ioctx->n_rdma += ret;
n_rdma 1232 drivers/infiniband/ulp/srpt/ib_srpt.c  ioctx->n_rdma = 0;
n_rdma 1329 drivers/infiniband/ulp/srpt/ib_srpt.c  WARN_ON(ioctx->n_rdma <= 0);
n_rdma 1330 drivers/infiniband/ulp/srpt/ib_srpt.c  atomic_add(ioctx->n_rdma, &ch->sq_wr_avail);
n_rdma 1331 drivers/infiniband/ulp/srpt/ib_srpt.c  ioctx->n_rdma = 0;
n_rdma 1757 drivers/infiniband/ulp/srpt/ib_srpt.c  atomic_add(1 + ioctx->n_rdma, &ch->sq_wr_avail);
n_rdma 2743 drivers/infiniband/ulp/srpt/ib_srpt.c  if (atomic_sub_return(ioctx->n_rdma, &ch->sq_wr_avail) < 0) {
n_rdma 2745 drivers/infiniband/ulp/srpt/ib_srpt.c  __func__, ioctx->n_rdma);
n_rdma 2762 drivers/infiniband/ulp/srpt/ib_srpt.c  __func__, ret, ioctx->n_rdma,
n_rdma 2769 drivers/infiniband/ulp/srpt/ib_srpt.c  atomic_add(ioctx->n_rdma, &ch->sq_wr_avail);
n_rdma 2847 drivers/infiniband/ulp/srpt/ib_srpt.c  if (unlikely(atomic_sub_return(1 + ioctx->n_rdma,
n_rdma 2850 drivers/infiniband/ulp/srpt/ib_srpt.c  __func__, ioctx->n_rdma);
n_rdma 2880 drivers/infiniband/ulp/srpt/ib_srpt.c  atomic_add(1 + ioctx->n_rdma, &ch->sq_wr_avail);
n_rdma  231 drivers/infiniband/ulp/srpt/ib_srpt.h  u8 n_rdma;
n_rdma   62 drivers/nvme/target/rdma.c             u8 n_rdma;
n_rdma  503 drivers/nvme/target/rdma.c             atomic_add(1 + rsp->n_rdma, &queue->sq_wr_avail);
n_rdma  505 drivers/nvme/target/rdma.c             if (rsp->n_rdma) {
n_rdma  588 drivers/nvme/target/rdma.c             WARN_ON(rsp->n_rdma <= 0);
n_rdma  589 drivers/nvme/target/rdma.c             atomic_add(rsp->n_rdma, &queue->sq_wr_avail);
n_rdma  593 drivers/nvme/target/rdma.c             rsp->n_rdma = 0;
n_rdma  683 drivers/nvme/target/rdma.c             rsp->n_rdma += ret;
n_rdma  735 drivers/nvme/target/rdma.c             if (unlikely(atomic_sub_return(1 + rsp->n_rdma,
n_rdma  738 drivers/nvme/target/rdma.c             1 + rsp->n_rdma, queue->idx,
n_rdma  740 drivers/nvme/target/rdma.c             atomic_add(1 + rsp->n_rdma, &queue->sq_wr_avail);
n_rdma  826 drivers/nvme/target/rdma.c             rsp->n_rdma = 0;
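Taken together, these hits show one accounting pattern used by both the SRP target (ib_srpt.c) and the NVMe-oF RDMA target (rdma.c): n_rdma counts the RDMA read/write work requests built for a command, the submit path reserves 1 + n_rdma send-queue slots with atomic_sub_return() on sq_wr_avail and backs off when the result goes negative, and the completion path returns the slots with atomic_add() before clearing n_rdma. Below is a minimal sketch of that reserve/release pattern; the example_* structure and function names are illustrative stand-ins and do not appear in either driver, only n_rdma, sq_wr_avail and the atomic helpers come from the listing above.

#include <linux/atomic.h>
#include <linux/types.h>

struct example_ctx {			/* illustrative stand-in for ioctx / rsp */
	u8	n_rdma;			/* RDMA read/write WRs queued for this command */
};

struct example_queue {			/* illustrative stand-in for ch / queue */
	atomic_t sq_wr_avail;		/* free send-queue work-request slots */
};

/* Reserve send-queue slots before posting: one response WR plus n_rdma data WRs. */
static bool example_reserve_sq_slots(struct example_queue *q, struct example_ctx *c)
{
	if (atomic_sub_return(1 + c->n_rdma, &q->sq_wr_avail) < 0) {
		/* Send queue is full: undo the reservation and let the caller retry. */
		atomic_add(1 + c->n_rdma, &q->sq_wr_avail);
		return false;
	}
	return true;
}

/* Give the slots back once the send / RDMA work requests have completed. */
static void example_release_sq_slots(struct example_queue *q, struct example_ctx *c)
{
	atomic_add(1 + c->n_rdma, &q->sq_wr_avail);
	c->n_rdma = 0;
}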