sreq              107 arch/s390/kernel/perf_cpum_sf.c 	struct hws_lsctl_request_block sreq;
sreq              109 arch/s390/kernel/perf_cpum_sf.c 	memset(&sreq, 0, sizeof(sreq));
sreq              110 arch/s390/kernel/perf_cpum_sf.c 	return lsctl(&sreq);
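
The three arch/s390 hits above all come from one small helper that disables the CPU-measurement sampling facility: loading an all-zero set of sampling controls switches sampling off. A minimal reconstruction of that helper, matching the lines quoted above:

static int sf_disable(void)
{
	struct hws_lsctl_request_block sreq;

	/* An all-zero request block clears every sampling control, so
	 * this load-sampling-controls call stops the sampling facility.
	 */
	memset(&sreq, 0, sizeof(sreq));
	return lsctl(&sreq);
}
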
sreq               68 block/bsg.c    	struct scsi_request *sreq = scsi_req(rq);
sreq               75 block/bsg.c    	sreq->cmd_len = hdr->request_len;
sreq               76 block/bsg.c    	if (sreq->cmd_len > BLK_MAX_CDB) {
sreq               77 block/bsg.c    		sreq->cmd = kzalloc(sreq->cmd_len, GFP_KERNEL);
sreq               78 block/bsg.c    		if (!sreq->cmd)
sreq               82 block/bsg.c    	if (copy_from_user(sreq->cmd, uptr64(hdr->request), sreq->cmd_len))
sreq               84 block/bsg.c    	if (blk_verify_command(sreq->cmd, mode))
sreq               91 block/bsg.c    	struct scsi_request *sreq = scsi_req(rq);
sreq               97 block/bsg.c    	hdr->device_status = sreq->result & 0xff;
sreq               98 block/bsg.c    	hdr->transport_status = host_byte(sreq->result);
sreq               99 block/bsg.c    	hdr->driver_status = driver_byte(sreq->result);
sreq              105 block/bsg.c    	if (sreq->sense_len && hdr->response) {
sreq              107 block/bsg.c    					sreq->sense_len);
sreq              109 block/bsg.c    		if (copy_to_user(uptr64(hdr->response), sreq->sense, len))
sreq              116 block/bsg.c    		hdr->din_resid = sreq->resid_len;
sreq              118 block/bsg.c    		hdr->dout_resid = sreq->resid_len;
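
The block/bsg.c hits split across two helpers: one copies the CDB from the userspace struct sg_io_v4 header into the request's scsi_request, allocating a separate buffer when the command is longer than BLK_MAX_CDB, and the other translates result, sense data and residual counts back into the header on completion. A condensed sketch of the fill side, with the surrounding error handling reduced to plain returns:

static int bsg_scsi_fill_hdr(struct request *rq, struct sg_io_v4 *hdr,
			     fmode_t mode)
{
	struct scsi_request *sreq = scsi_req(rq);

	sreq->cmd_len = hdr->request_len;
	if (sreq->cmd_len > BLK_MAX_CDB) {
		/* Oversized CDBs do not fit the inline buffer. */
		sreq->cmd = kzalloc(sreq->cmd_len, GFP_KERNEL);
		if (!sreq->cmd)
			return -ENOMEM;
	}

	if (copy_from_user(sreq->cmd, uptr64(hdr->request), sreq->cmd_len))
		return -EFAULT;
	if (blk_verify_command(sreq->cmd, mode))
		return -EPERM;
	return 0;
}
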
sreq               76 drivers/block/virtio_blk.c 	struct scsi_request sreq;	/* for SCSI passthrough, must be first */
sreq              112 drivers/block/virtio_blk.c 	sg_init_one(&cmd, vbr->sreq.cmd, vbr->sreq.cmd_len);
sreq              136 drivers/block/virtio_blk.c 	struct scsi_request *sreq = &vbr->sreq;
sreq              138 drivers/block/virtio_blk.c 	sreq->resid_len = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.residual);
sreq              139 drivers/block/virtio_blk.c 	sreq->sense_len = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.sense_len);
sreq              140 drivers/block/virtio_blk.c 	sreq->result = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.errors);
sreq              759 drivers/block/virtio_blk.c 	vbr->sreq.sense = vbr->sense;
sreq              778 drivers/block/virtio_blk.c 	scsi_req_init(&vbr->sreq);
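
drivers/block/virtio_blk.c embeds the scsi_request as the first member of its per-request PDU (the hit at file line 76 insists on "must be first"), so scsi_req() on the block request resolves to it. Completion then only has to convert the device-written in_hdr out of virtio byte order, which matches the hits at file lines 136-140:

static inline void virtblk_scsi_request_done(struct request *req)
{
	struct virtblk_req *vbr = blk_mq_rq_to_pdu(req);
	struct virtio_blk *vblk = req->q->queuedata;
	struct scsi_request *sreq = &vbr->sreq;

	/* in_hdr fields arrive in virtio byte order. */
	sreq->resid_len = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.residual);
	sreq->sense_len = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.sense_len);
	sreq->result = virtio32_to_cpu(vblk->vdev, vbr->in_hdr.errors);
}
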
sreq              409 drivers/crypto/inside-secure/safexcel_cipher.c 				    struct safexcel_cipher_req *sreq,
sreq              434 drivers/crypto/inside-secure/safexcel_cipher.c 		if (sreq->direction == SAFEXCEL_ENCRYPT)
sreq              446 drivers/crypto/inside-secure/safexcel_cipher.c 		if (sreq->direction == SAFEXCEL_ENCRYPT)
sreq              493 drivers/crypto/inside-secure/safexcel_cipher.c 				      struct safexcel_cipher_req *sreq,
sreq              504 drivers/crypto/inside-secure/safexcel_cipher.c 	if (unlikely(!sreq->rdescs))
sreq              507 drivers/crypto/inside-secure/safexcel_cipher.c 	while (sreq->rdescs--) {
sreq              525 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
sreq              527 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
sreq              528 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
sreq              535 drivers/crypto/inside-secure/safexcel_cipher.c 	    (sreq->direction == SAFEXCEL_ENCRYPT)) {
sreq              537 drivers/crypto/inside-secure/safexcel_cipher.c 		sg_pcopy_to_buffer(dst, sreq->nr_dst, areq->iv,
sreq              549 drivers/crypto/inside-secure/safexcel_cipher.c 			     struct safexcel_cipher_req *sreq,
sreq              570 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->nr_src = sg_nents_for_len(src, totlen_src);
sreq              577 drivers/crypto/inside-secure/safexcel_cipher.c 		if (sreq->direction == SAFEXCEL_DECRYPT)
sreq              589 drivers/crypto/inside-secure/safexcel_cipher.c 		   (sreq->direction == SAFEXCEL_DECRYPT)) {
sreq              595 drivers/crypto/inside-secure/safexcel_cipher.c 		sg_pcopy_to_buffer(src, sreq->nr_src, areq->iv,
sreq              601 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->nr_dst = sg_nents_for_len(dst, totlen_dst);
sreq              611 drivers/crypto/inside-secure/safexcel_cipher.c 		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
sreq              612 drivers/crypto/inside-secure/safexcel_cipher.c 		sreq->nr_dst = sreq->nr_src;
sreq              614 drivers/crypto/inside-secure/safexcel_cipher.c 		    (sreq->nr_src <= 0))) {
sreq              619 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
sreq              621 drivers/crypto/inside-secure/safexcel_cipher.c 		if (unlikely(totlen_src && (sreq->nr_src <= 0))) {
sreq              626 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_map_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
sreq              628 drivers/crypto/inside-secure/safexcel_cipher.c 		if (unlikely(totlen_dst && (sreq->nr_dst <= 0))) {
sreq              631 drivers/crypto/inside-secure/safexcel_cipher.c 			dma_unmap_sg(priv->dev, src, sreq->nr_src,
sreq              635 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_map_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
sreq              645 drivers/crypto/inside-secure/safexcel_cipher.c 	for_each_sg(src, sg, sreq->nr_src, i) {
sreq              683 drivers/crypto/inside-secure/safexcel_cipher.c 	safexcel_context_control(ctx, base, sreq, first_cdesc);
sreq              686 drivers/crypto/inside-secure/safexcel_cipher.c 				    sreq->direction, cryptlen,
sreq              693 drivers/crypto/inside-secure/safexcel_cipher.c 	for_each_sg(dst, sg, sreq->nr_dst, i) {
sreq              694 drivers/crypto/inside-secure/safexcel_cipher.c 		bool last = (i == sreq->nr_dst - 1);
sreq              763 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
sreq              765 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, src, sreq->nr_src, DMA_TO_DEVICE);
sreq              766 drivers/crypto/inside-secure/safexcel_cipher.c 		dma_unmap_sg(priv->dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
sreq              775 drivers/crypto/inside-secure/safexcel_cipher.c 				      struct safexcel_cipher_req *sreq,
sreq              784 drivers/crypto/inside-secure/safexcel_cipher.c 	if (unlikely(!sreq->rdescs))
sreq              787 drivers/crypto/inside-secure/safexcel_cipher.c 	while (sreq->rdescs--) {
sreq              837 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
sreq              840 drivers/crypto/inside-secure/safexcel_cipher.c 	if (sreq->needs_inv) {
sreq              841 drivers/crypto/inside-secure/safexcel_cipher.c 		sreq->needs_inv = false;
sreq              842 drivers/crypto/inside-secure/safexcel_cipher.c 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
sreq              846 drivers/crypto/inside-secure/safexcel_cipher.c 						 req->dst, req->cryptlen, sreq,
sreq              860 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
sreq              863 drivers/crypto/inside-secure/safexcel_cipher.c 	if (sreq->needs_inv) {
sreq              864 drivers/crypto/inside-secure/safexcel_cipher.c 		sreq->needs_inv = false;
sreq              865 drivers/crypto/inside-secure/safexcel_cipher.c 		err = safexcel_handle_inv_result(priv, ring, async, sreq,
sreq              871 drivers/crypto/inside-secure/safexcel_cipher.c 						 sreq, should_complete, ret);
sreq              899 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
sreq              903 drivers/crypto/inside-secure/safexcel_cipher.c 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
sreq              905 drivers/crypto/inside-secure/safexcel_cipher.c 	if (sreq->needs_inv) {
sreq              917 drivers/crypto/inside-secure/safexcel_cipher.c 		ret = safexcel_send_req(async, ring, sreq, req->src,
sreq              922 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->rdescs = *results;
sreq              932 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
sreq              936 drivers/crypto/inside-secure/safexcel_cipher.c 	BUG_ON(!(priv->flags & EIP197_TRC_CACHE) && sreq->needs_inv);
sreq              938 drivers/crypto/inside-secure/safexcel_cipher.c 	if (sreq->needs_inv)
sreq              941 drivers/crypto/inside-secure/safexcel_cipher.c 		ret = safexcel_send_req(async, ring, sreq, req->src, req->dst,
sreq              945 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->rdescs = *results;
sreq              951 drivers/crypto/inside-secure/safexcel_cipher.c 				    struct safexcel_cipher_req *sreq,
sreq              962 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->needs_inv = true;
sreq              986 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = skcipher_request_ctx(req);
sreq              995 drivers/crypto/inside-secure/safexcel_cipher.c 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
sreq             1001 drivers/crypto/inside-secure/safexcel_cipher.c 	struct safexcel_cipher_req *sreq = aead_request_ctx(req);
sreq             1010 drivers/crypto/inside-secure/safexcel_cipher.c 	return safexcel_cipher_exit_inv(tfm, &req->base, sreq, &result);
sreq             1014 drivers/crypto/inside-secure/safexcel_cipher.c 			struct safexcel_cipher_req *sreq,
sreq             1021 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->needs_inv = false;
sreq             1022 drivers/crypto/inside-secure/safexcel_cipher.c 	sreq->direction = dir;
sreq             1026 drivers/crypto/inside-secure/safexcel_cipher.c 			sreq->needs_inv = true;
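
The safexcel_cipher.c hits repeat one convention worth calling out: when a request operates in place (src == dst) the scatterlist is mapped once as DMA_BIDIRECTIONAL, otherwise source and destination get separate DMA_TO_DEVICE / DMA_FROM_DEVICE mappings, and every unmap path mirrors that choice. A reduced sketch of the pattern under illustrative names (sketch_*); the driver's descriptor setup and the checks on the dma_map_sg return values are elided:

static int sketch_map_buffers(struct device *dev,
			      struct safexcel_cipher_req *sreq,
			      struct scatterlist *src,
			      struct scatterlist *dst)
{
	if (src == dst) {
		/* In place: one mapping covers both directions. */
		sreq->nr_src = max(sreq->nr_src, sreq->nr_dst);
		sreq->nr_dst = sreq->nr_src;
		dma_map_sg(dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_map_sg(dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_map_sg(dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}
	return 0;
}

static void sketch_unmap_buffers(struct device *dev,
				 struct safexcel_cipher_req *sreq,
				 struct scatterlist *src,
				 struct scatterlist *dst)
{
	/* Must mirror the mapping decision made above. */
	if (src == dst) {
		dma_unmap_sg(dev, src, sreq->nr_src, DMA_BIDIRECTIONAL);
	} else {
		dma_unmap_sg(dev, src, sreq->nr_src, DMA_TO_DEVICE);
		dma_unmap_sg(dev, dst, sreq->nr_dst, DMA_FROM_DEVICE);
	}
}
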
sreq              184 drivers/crypto/inside-secure/safexcel_hash.c 	struct safexcel_ahash_req *sreq = ahash_request_ctx(areq);
sreq              201 drivers/crypto/inside-secure/safexcel_hash.c 	if (sreq->nents) {
sreq              202 drivers/crypto/inside-secure/safexcel_hash.c 		dma_unmap_sg(priv->dev, areq->src, sreq->nents, DMA_TO_DEVICE);
sreq              203 drivers/crypto/inside-secure/safexcel_hash.c 		sreq->nents = 0;
sreq              206 drivers/crypto/inside-secure/safexcel_hash.c 	if (sreq->result_dma) {
sreq              207 drivers/crypto/inside-secure/safexcel_hash.c 		dma_unmap_single(priv->dev, sreq->result_dma, sreq->state_sz,
sreq              209 drivers/crypto/inside-secure/safexcel_hash.c 		sreq->result_dma = 0;
sreq              212 drivers/crypto/inside-secure/safexcel_hash.c 	if (sreq->cache_dma) {
sreq              213 drivers/crypto/inside-secure/safexcel_hash.c 		dma_unmap_single(priv->dev, sreq->cache_dma, sreq->cache_sz,
sreq              215 drivers/crypto/inside-secure/safexcel_hash.c 		sreq->cache_dma = 0;
sreq              216 drivers/crypto/inside-secure/safexcel_hash.c 		sreq->cache_sz = 0;
sreq              219 drivers/crypto/inside-secure/safexcel_hash.c 	if (sreq->finish) {
sreq              220 drivers/crypto/inside-secure/safexcel_hash.c 		if (sreq->hmac &&
sreq              221 drivers/crypto/inside-secure/safexcel_hash.c 		    (sreq->digest != CONTEXT_CONTROL_DIGEST_HMAC)) {
sreq              223 drivers/crypto/inside-secure/safexcel_hash.c 			memcpy(sreq->cache, sreq->state,
sreq              226 drivers/crypto/inside-secure/safexcel_hash.c 			memcpy(sreq->state, ctx->opad, sreq->state_sz);
sreq              228 drivers/crypto/inside-secure/safexcel_hash.c 			sreq->len = sreq->block_sz +
sreq              230 drivers/crypto/inside-secure/safexcel_hash.c 			sreq->processed = sreq->block_sz;
sreq              231 drivers/crypto/inside-secure/safexcel_hash.c 			sreq->hmac = 0;
sreq              241 drivers/crypto/inside-secure/safexcel_hash.c 		memcpy(areq->result, sreq->state,
sreq              245 drivers/crypto/inside-secure/safexcel_hash.c 	cache_len = safexcel_queued_len(sreq);
sreq              247 drivers/crypto/inside-secure/safexcel_hash.c 		memcpy(sreq->cache, sreq->cache_next, cache_len);
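
The safexcel_hash.c hits all sit in the request-completion path, which unmaps each DMA resource and zeroes its handle so a later cleanup pass cannot unmap twice; for HMAC it then swaps the inner digest into the cache and restages the outer hash over opad. A trimmed sketch of the unmap portion, assuming the field names shown in the hits above:

static void sketch_hash_unmap(struct device *dev, struct ahash_request *areq,
			      struct safexcel_ahash_req *sreq)
{
	/* Each mapping is released at most once: the handle or count
	 * is cleared immediately after the unmap.
	 */
	if (sreq->nents) {
		dma_unmap_sg(dev, areq->src, sreq->nents, DMA_TO_DEVICE);
		sreq->nents = 0;
	}

	if (sreq->result_dma) {
		dma_unmap_single(dev, sreq->result_dma, sreq->state_sz,
				 DMA_FROM_DEVICE);
		sreq->result_dma = 0;
	}

	if (sreq->cache_dma) {
		dma_unmap_single(dev, sreq->cache_dma, sreq->cache_sz,
				 DMA_TO_DEVICE);
		sreq->cache_dma = 0;
		sreq->cache_sz = 0;
	}
}
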
sreq               84 drivers/crypto/marvell/cipher.c 	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
sreq               86 drivers/crypto/marvell/cipher.c 	size_t  len = min_t(size_t, req->cryptlen - sreq->offset,
sreq               89 drivers/crypto/marvell/cipher.c 	mv_cesa_adjust_op(engine, &sreq->op);
sreq               90 drivers/crypto/marvell/cipher.c 	memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
sreq               94 drivers/crypto/marvell/cipher.c 				 len, sreq->offset);
sreq               96 drivers/crypto/marvell/cipher.c 	sreq->size = len;
sreq               97 drivers/crypto/marvell/cipher.c 	mv_cesa_set_crypt_op_len(&sreq->op, len);
sreq              100 drivers/crypto/marvell/cipher.c 	if (!sreq->skip_ctx) {
sreq              101 drivers/crypto/marvell/cipher.c 		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
sreq              102 drivers/crypto/marvell/cipher.c 		sreq->skip_ctx = true;
sreq              104 drivers/crypto/marvell/cipher.c 		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));
sreq              118 drivers/crypto/marvell/cipher.c 	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
sreq              124 drivers/crypto/marvell/cipher.c 				   sreq->size, sreq->offset);
sreq              126 drivers/crypto/marvell/cipher.c 	sreq->offset += len;
sreq              127 drivers/crypto/marvell/cipher.c 	if (sreq->offset < req->cryptlen)
sreq              170 drivers/crypto/marvell/cipher.c 	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
sreq              172 drivers/crypto/marvell/cipher.c 	sreq->size = 0;
sreq              173 drivers/crypto/marvell/cipher.c 	sreq->offset = 0;
sreq              399 drivers/crypto/marvell/cipher.c 	struct mv_cesa_skcipher_std_req *sreq = &creq->std;
sreq              402 drivers/crypto/marvell/cipher.c 	sreq->op = *op_templ;
sreq              403 drivers/crypto/marvell/cipher.c 	sreq->skip_ctx = false;
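
The marvell/cipher.c hits describe the CESA "standard" (non-DMA) path: each step copies the op descriptor plus up to one SRAM payload's worth of data into the engine's SRAM, and completion advances sreq->offset until the whole cryptlen has been walked. After the first chunk only the short descriptor part of the op is rewritten (skip_ctx), since the key/IV context already sits in SRAM. A schematic of that loop under illustrative sketch_* names, with the data copy and interrupt plumbing elided:

static void sketch_std_step(struct mv_cesa_engine *engine,
			    struct skcipher_request *req,
			    struct mv_cesa_skcipher_std_req *sreq)
{
	size_t len = min_t(size_t, req->cryptlen - sreq->offset,
			   CESA_SA_SRAM_PAYLOAD_SIZE);

	if (!sreq->skip_ctx) {
		/* First chunk: send the whole op, context included. */
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op));
		sreq->skip_ctx = true;
	} else {
		/* Later chunks: the short descriptor part suffices. */
		memcpy_toio(engine->sram, &sreq->op, sizeof(sreq->op.desc));
	}

	mv_cesa_set_crypt_op_len(&sreq->op, len);
	sreq->size = len;
}

static bool sketch_std_step_done(struct skcipher_request *req,
				 struct mv_cesa_skcipher_std_req *sreq)
{
	sreq->offset += sreq->size;
	return sreq->offset >= req->cryptlen; /* false: stage next chunk */
}
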
sreq              156 drivers/crypto/marvell/hash.c 	struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
sreq              168 drivers/crypto/marvell/hash.c 	if (!sreq->offset) {
sreq              178 drivers/crypto/marvell/hash.c 	len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset,
sreq              187 drivers/crypto/marvell/hash.c 		sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents,
sreq              192 drivers/crypto/marvell/hash.c 						   sreq->offset);
sreq              198 drivers/crypto/marvell/hash.c 	if (creq->last_req && sreq->offset == req->nbytes &&
sreq              256 drivers/crypto/marvell/hash.c 	struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
sreq              258 drivers/crypto/marvell/hash.c 	if (sreq->offset < (req->nbytes - creq->cache_ptr))
sreq              275 drivers/crypto/marvell/hash.c 	struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
sreq              277 drivers/crypto/marvell/hash.c 	sreq->offset = 0;
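
The marvell/hash.c hits apply the same standard-mode scheme on the hashing side: sreq->offset tracks how much of the source has been fed to the engine, minus the tail held back in the cache for the next update. A one-function sketch of the resulting progress check, following the condition in the hit at file line 258:

static bool sketch_hash_std_done(struct ahash_request *req,
				 struct mv_cesa_ahash_req *creq,
				 struct mv_cesa_ahash_std_req *sreq)
{
	/* creq->cache_ptr bytes stay behind for the next update, so
	 * the request is done once the offset covers everything else.
	 */
	return sreq->offset >= (req->nbytes - creq->cache_ptr);
}
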
sreq              751 drivers/ide/ide-probe.c 	scsi_req_init(&req->sreq);
sreq              752 drivers/ide/ide-probe.c 	req->sreq.sense = req->sense;
sreq               50 include/linux/ide.h 	struct scsi_request sreq;
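
The ide hits mirror the virtio_blk arrangement: include/linux/ide.h embeds a struct scsi_request at the start of the per-request PDU, and ide-probe.c initializes it with its sense pointer aimed at storage in the same structure. A sketch of the implied layout; only the two members visible in the hits are certain, any trailing fields are assumed:

struct ide_request {
	struct scsi_request sreq;	/* must come first for scsi_req() */
	u8 sense[SCSI_SENSE_BUFFERSIZE];
	/* further ide-specific fields assumed here */
};

static void sketch_ide_init_rq(struct ide_request *req)
{
	scsi_req_init(&req->sreq);
	req->sreq.sense = req->sense;	/* sense data lands in-struct */
}
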
sreq              288 include/linux/netfilter/nf_conntrack_pptp.h 	struct PptpStartSessionRequest	sreq;
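
The lone netfilter hit is unrelated to the block and crypto uses above: there, sreq names the StartSessionRequest arm of the PPTP control-message union in the conntrack helper header. Roughly the following shape; the other arm names are from memory and the member set is abbreviated:

union pptp_ctrl_union {
	struct PptpStartSessionRequest	sreq;	/* the hit above */
	struct PptpStartSessionReply	srep;
	struct PptpStopSessionRequest	streq;
	struct PptpStopSessionReply	strep;
	/* ... further PPTP control-message types ... */
};
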