Lines matching refs: va

58 static u32 *decode_read_list(u32 *va, u32 *vaend)  in decode_read_list()  argument
60 struct rpcrdma_read_chunk *ch = (struct rpcrdma_read_chunk *)va; in decode_read_list()
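
The two matches above are where the read-chunk walk starts: va arrives pointing at the read list and is immediately cast to struct rpcrdma_read_chunk. A minimal sketch of the rest of the helper, written against the kernel's rpc_rdma.h layout; the rc_discrim field, the xdr_zero terminator and the bounds check do not appear in the matches and are assumptions:

    static u32 *decode_read_list(u32 *va, u32 *vaend)
    {
            struct rpcrdma_read_chunk *ch = (struct rpcrdma_read_chunk *)va;

            /* Walk entries until the xdr_zero discriminator, refusing to
             * step past the end of the received buffer (vaend). */
            while (ch->rc_discrim != xdr_zero) {
                    if ((unsigned long)(ch + 1) > (unsigned long)vaend)
                            return NULL;    /* list overruns the header */
                    ch++;
            }
            /* The terminator's rc_position slot is where the next
             * chunk list begins. */
            return (u32 *)&ch->rc_position;
    }
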
83 static u32 *decode_write_list(u32 *va, u32 *vaend) in decode_write_list() argument
89 (struct rpcrdma_write_array *)va; in decode_write_list()
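
Here va is reinterpreted as a struct rpcrdma_write_array (discriminator, chunk count, then the array itself). A sketch of how the helper plausibly advances past it; wc_discrim, xdr_zero and the overflow-safe bounds check are assumptions not shown in the matches:

    static u32 *decode_write_list(u32 *va, u32 *vaend)
    {
            struct rpcrdma_write_array *ary = (struct rpcrdma_write_array *)va;
            u32 nchunks;

            /* An absent write list is a single xdr_zero word; the next
             * list starts right after it, at &wc_nchunks. */
            if (ary->wc_discrim == xdr_zero)
                    return (u32 *)&ary->wc_nchunks;

            if ((unsigned long)(ary + 1) > (unsigned long)vaend)
                    return NULL;

            /* wc_nchunks comes off the wire, so divide rather than
             * multiply to avoid integer overflow in the bounds check. */
            nchunks = ntohl(ary->wc_nchunks);
            if (nchunks > ((unsigned long)vaend -
                           (unsigned long)&ary->wc_array[0]) /
                          sizeof(struct rpcrdma_write_chunk))
                    return NULL;

            /* rs_length is the second 32-bit word of the entry one past
             * the array, so taking its address also skips the 32-bit
             * list terminator. */
            return (u32 *)&ary->wc_array[nchunks].wc_target.rs_length;
    }
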
118 static u32 *decode_reply_array(u32 *va, u32 *vaend) in decode_reply_array() argument
123 (struct rpcrdma_write_array *)va; in decode_reply_array()
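
The reply array is decoded through the same struct rpcrdma_write_array view and, presumably, with the same checks as the write list above; the matches in svc_rdma_xdr_decode_deferred_req() (lines 254 and 261) suggest the only structural difference is the final pointer, since the reply chunk is a single counted array with no extra terminator word to skip:

    /* Tail of a decode_reply_array() sketch; the preceding checks are
     * assumed to match decode_write_list() above. */
    return (u32 *)&ary->wc_array[nchunks];
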
152 u32 *va; in svc_rdma_xdr_decode_req() local
182 va = &rmsgp->rm_body.rm_padded.rm_pempty[4]; in svc_rdma_xdr_decode_req()
183 rqstp->rq_arg.head[0].iov_base = va; in svc_rdma_xdr_decode_req()
184 hdrlen = (u32)((unsigned long)va - (unsigned long)rmsgp); in svc_rdma_xdr_decode_req()
194 va = &rmsgp->rm_body.rm_chunks[0]; in svc_rdma_xdr_decode_req()
196 va = decode_read_list(va, vaend); in svc_rdma_xdr_decode_req()
197 if (!va) in svc_rdma_xdr_decode_req()
199 va = decode_write_list(va, vaend); in svc_rdma_xdr_decode_req()
200 if (!va) in svc_rdma_xdr_decode_req()
202 va = decode_reply_array(va, vaend); in svc_rdma_xdr_decode_req()
203 if (!va) in svc_rdma_xdr_decode_req()
206 rqstp->rq_arg.head[0].iov_base = va; in svc_rdma_xdr_decode_req()
207 hdr_len = (unsigned long)va - (unsigned long)rmsgp; in svc_rdma_xdr_decode_req()
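
The matches show two paths through svc_rdma_xdr_decode_req(): for a padded RDMA_MSGP message va jumps straight past rm_pempty[4] and the header length falls out as the distance from rmsgp (lines 182-184), while in the normal case va starts at rm_body.rm_chunks[0] and is pushed forward by the three list decoders before head[0].iov_base is rebased onto it (lines 194-207). A sketch of the normal path stitched together from those matches; the vaend computation, the iov_len adjustment and the return value are not in the matches and are assumptions:

    /* Non-padded message: va walks the read list, write list and
     * reply array, bounded by the end of the received header. */
    va = &rmsgp->rm_body.rm_chunks[0];
    vaend = (u32 *)((unsigned long)rmsgp + rqstp->rq_arg.len);

    va = decode_read_list(va, vaend);
    if (!va)
            return -EINVAL;
    va = decode_write_list(va, vaend);
    if (!va)
            return -EINVAL;
    va = decode_reply_array(va, vaend);
    if (!va)
            return -EINVAL;

    /* Everything up to va was RPC-over-RDMA header; the RPC message
     * proper starts here. */
    rqstp->rq_arg.head[0].iov_base = va;
    hdr_len = (unsigned long)va - (unsigned long)rmsgp;
    rqstp->rq_arg.head[0].iov_len -= hdr_len;
    return hdr_len;
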
219 u32 *va; in svc_rdma_xdr_decode_deferred_req() local
228 va = &rmsgp->rm_body.rm_padded.rm_pempty[4]; in svc_rdma_xdr_decode_deferred_req()
229 rqstp->rq_arg.head[0].iov_base = va; in svc_rdma_xdr_decode_deferred_req()
230 hdrlen = (u32)((unsigned long)va - (unsigned long)rmsgp); in svc_rdma_xdr_decode_deferred_req()
238 va = &rmsgp->rm_body.rm_chunks[0]; in svc_rdma_xdr_decode_deferred_req()
241 for (ch = (struct rpcrdma_read_chunk *)va; in svc_rdma_xdr_decode_deferred_req()
243 va = (u32 *)&ch->rc_position; in svc_rdma_xdr_decode_deferred_req()
246 ary = (struct rpcrdma_write_array *)va; in svc_rdma_xdr_decode_deferred_req()
248 va = (u32 *)&ary->wc_nchunks; in svc_rdma_xdr_decode_deferred_req()
254 va = (u32 *)&ary->wc_array[ary->wc_nchunks].wc_target.rs_length; in svc_rdma_xdr_decode_deferred_req()
257 ary = (struct rpcrdma_write_array *)va; in svc_rdma_xdr_decode_deferred_req()
259 va = (u32 *)&ary->wc_nchunks; in svc_rdma_xdr_decode_deferred_req()
261 va = (u32 *)&ary->wc_array[ary->wc_nchunks]; in svc_rdma_xdr_decode_deferred_req()
263 rqstp->rq_arg.head[0].iov_base = va; in svc_rdma_xdr_decode_deferred_req()
264 hdrlen = (unsigned long)va - (unsigned long)rmsgp; in svc_rdma_xdr_decode_deferred_req()
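
The deferred variant re-walks a header that has already been decoded once, so it repeats the three skips inline and without bounds checks. A sketch of how the matches at lines 238-264 fit together; the loop condition and the two wc_discrim tests never touch va and therefore do not appear above, so they are assumptions:

    va = &rmsgp->rm_body.rm_chunks[0];

    /* Skip the read chunk list (assumed: terminated by xdr_zero). */
    for (ch = (struct rpcrdma_read_chunk *)va;
         ch->rc_discrim != xdr_zero; ch++)
            ;
    va = (u32 *)&ch->rc_position;

    /* Skip the write chunk list; the count is used as-is here,
     * presumably because the first pass already sanity-checked it. */
    ary = (struct rpcrdma_write_array *)va;
    if (ary->wc_discrim == xdr_zero)
            va = (u32 *)&ary->wc_nchunks;
    else
            va = (u32 *)&ary->wc_array[ary->wc_nchunks].wc_target.rs_length;

    /* Skip the reply array; no terminator word follows it. */
    ary = (struct rpcrdma_write_array *)va;
    if (ary->wc_discrim == xdr_zero)
            va = (u32 *)&ary->wc_nchunks;
    else
            va = (u32 *)&ary->wc_array[ary->wc_nchunks];

    rqstp->rq_arg.head[0].iov_base = va;
    hdrlen = (unsigned long)va - (unsigned long)rmsgp;
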
272 enum rpcrdma_errcode err, u32 *va) in svc_rdma_xdr_encode_error() argument
274 u32 *startp = va; in svc_rdma_xdr_encode_error()
276 *va++ = htonl(rmsgp->rm_xid); in svc_rdma_xdr_encode_error()
277 *va++ = htonl(rmsgp->rm_vers); in svc_rdma_xdr_encode_error()
278 *va++ = htonl(xprt->sc_max_requests); in svc_rdma_xdr_encode_error()
279 *va++ = htonl(RDMA_ERROR); in svc_rdma_xdr_encode_error()
280 *va++ = htonl(err); in svc_rdma_xdr_encode_error()
282 *va++ = htonl(RPCRDMA_VERSION); in svc_rdma_xdr_encode_error()
283 *va++ = htonl(RPCRDMA_VERSION); in svc_rdma_xdr_encode_error()
286 return (int)((unsigned long)va - (unsigned long)startp); in svc_rdma_xdr_encode_error()
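
The matches cover almost the whole encoder: va writes the error reply one word at a time (the XID, the protocol version, a credit value taken from sc_max_requests, the RDMA_ERROR procedure, the error code, and twice RPCRDMA_VERSION), and the number of bytes written is returned by pointer subtraction from startp. The only piece missing from the matches is the conditional around lines 282-283; a sketch, assuming the guard tests for the version-mismatch code ERR_VERS:

    *va++ = htonl(RDMA_ERROR);
    *va++ = htonl(err);
    if (err == ERR_VERS) {
            /* For a version error, report the supported range: the one
             * supported version is both the minimum and the maximum. */
            *va++ = htonl(RPCRDMA_VERSION);
            *va++ = htonl(RPCRDMA_VERSION);
    }

    return (int)((unsigned long)va - (unsigned long)startp);
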