creq 129 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
creq 136 crypto/chacha20poly1305.c chacha_iv(creq->iv, req, 1);
creq 143 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
creq 145 crypto/chacha20poly1305.c skcipher_request_set_tfm(&creq->req, ctx->chacha);
creq 146 crypto/chacha20poly1305.c skcipher_request_set_crypt(&creq->req, src, dst,
creq 147 crypto/chacha20poly1305.c rctx->cryptlen, creq->iv);
creq 148 crypto/chacha20poly1305.c err = crypto_skcipher_decrypt(&creq->req);
creq 367 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
creq 379 crypto/chacha20poly1305.c sg_init_one(creq->src, rctx->key, sizeof(rctx->key));
creq 381 crypto/chacha20poly1305.c chacha_iv(creq->iv, req, 0);
creq 383 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
creq 385 crypto/chacha20poly1305.c skcipher_request_set_tfm(&creq->req, ctx->chacha);
creq 386 crypto/chacha20poly1305.c skcipher_request_set_crypt(&creq->req, creq->src, creq->src,
creq 387 crypto/chacha20poly1305.c POLY1305_KEY_SIZE, creq->iv);
creq 389 crypto/chacha20poly1305.c err = crypto_skcipher_decrypt(&creq->req);
creq 405 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
creq 412 crypto/chacha20poly1305.c chacha_iv(creq->iv, req, 1);
creq 419 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
creq 421 crypto/chacha20poly1305.c skcipher_request_set_tfm(&creq->req, ctx->chacha);
creq 422 crypto/chacha20poly1305.c skcipher_request_set_crypt(&creq->req, src, dst,
creq 423 crypto/chacha20poly1305.c req->cryptlen, creq->iv);
creq 424 crypto/chacha20poly1305.c err = crypto_skcipher_encrypt(&creq->req);
creq 95 crypto/pcrypt.c struct aead_request *creq = pcrypt_request_ctx(preq);
creq 109 crypto/pcrypt.c aead_request_set_tfm(creq, ctx->child);
creq 110 crypto/pcrypt.c aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
creq 112 crypto/pcrypt.c aead_request_set_crypt(creq, req->src, req->dst,
creq 114 crypto/pcrypt.c aead_request_set_ad(creq, req->assoclen);
creq 140 crypto/pcrypt.c struct aead_request *creq = pcrypt_request_ctx(preq);
creq 154 crypto/pcrypt.c aead_request_set_tfm(creq, ctx->child);
creq 155 crypto/pcrypt.c aead_request_set_callback(creq, flags & ~CRYPTO_TFM_REQ_MAY_SLEEP,
creq 157 crypto/pcrypt.c aead_request_set_crypt(creq, req->src, req->dst,
creq 159 crypto/pcrypt.c aead_request_set_ad(creq, req->assoclen);
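
The crypto/chacha20poly1305.c and crypto/pcrypt.c lines above all follow one shape: a sub-request kept in the outer request's context is pointed at a child transform and then submitted. A minimal sketch of that shape, assuming made-up my_* names (the callback forwarding is simplified; the real templates install their own completion handlers and size the request context at init time):

#include <crypto/internal/skcipher.h>

struct my_tfm_ctx {
	struct crypto_skcipher *child;	/* allocated in the tfm init path */
};

struct my_req_ctx {
	struct skcipher_request subreq;	/* room reserved via crypto_skcipher_set_reqsize() */
};

static int my_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct my_req_ctx *rctx = skcipher_request_ctx(req);

	/* Point the sub-request at the child cipher and reuse the caller's
	 * buffers, length and IV, then hand it to the child transform. */
	skcipher_request_set_tfm(&rctx->subreq, ctx->child);
	skcipher_request_set_callback(&rctx->subreq, req->base.flags,
				      req->base.complete, req->base.data);
	skcipher_request_set_crypt(&rctx->subreq, req->src, req->dst,
				   req->cryptlen, req->iv);

	return crypto_skcipher_encrypt(&rctx->subreq);
}
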
creq 137 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
creq 141 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->flags = rctx->flags;
creq 142 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
creq 145 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctrl.value = 0;
creq 146 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
creq 147 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctrl.s.arg = rctx->ctrl_arg;
creq 149 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
creq 150 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
creq 151 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
creq 154 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param3 = cpu_to_be16(param3.param);
creq 156 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctx_handle = rctx->ctx_handle;
creq 157 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);
creq 194 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
creq 216 drivers/crypto/cavium/nitrox/nitrox_aead.c return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
creq 225 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
creq 247 drivers/crypto/cavium/nitrox/nitrox_aead.c return nitrox_process_se_request(nctx->ndev, creq, nitrox_aead_callback,
creq 416 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
creq 438 drivers/crypto/cavium/nitrox/nitrox_aead.c return nitrox_process_se_request(nctx->ndev, creq,
creq 448 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &aead_rctx->nkreq.creq;
creq 471 drivers/crypto/cavium/nitrox/nitrox_aead.c return nitrox_process_se_request(nctx->ndev, creq,
creq 209 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request creq;
creq 615 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 617 drivers/crypto/cavium/nitrox/nitrox_req.h nkreq->src = alloc_req_buf(nents, ivsize, creq->gfp);
creq 640 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 642 drivers/crypto/cavium/nitrox/nitrox_req.h creq->src = nitrox_creq_src_sg(iv, ivsize);
creq 643 drivers/crypto/cavium/nitrox/nitrox_req.h sg = creq->src;
creq 662 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 664 drivers/crypto/cavium/nitrox/nitrox_req.h nkreq->dst = alloc_req_buf(nents, extralen, creq->gfp);
creq 673 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 675 drivers/crypto/cavium/nitrox/nitrox_req.h creq->orh = (u64 *)(nkreq->dst);
creq 676 drivers/crypto/cavium/nitrox/nitrox_req.h set_orh_value(creq->orh);
creq 681 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 683 drivers/crypto/cavium/nitrox/nitrox_req.h creq->comp = (u64 *)(nkreq->dst + ORH_HLEN);
creq 684 drivers/crypto/cavium/nitrox/nitrox_req.h set_comp_value(creq->comp);
creq 696 drivers/crypto/cavium/nitrox/nitrox_req.h struct se_crypto_request *creq = &nkreq->creq;
creq 700 drivers/crypto/cavium/nitrox/nitrox_req.h creq->dst = nitrox_creq_dst_sg(nkreq->dst);
creq 701 drivers/crypto/cavium/nitrox/nitrox_req.h sg = creq->dst;
creq 711 drivers/crypto/cavium/nitrox/nitrox_req.h sg = create_single_sg(sg, creq->orh, ORH_HLEN);
creq 717 drivers/crypto/cavium/nitrox/nitrox_req.h create_single_sg(sg, creq->comp, COMP_HLEN);
creq 212 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c struct se_crypto_request *creq)
creq 216 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c ret = dma_map_inbufs(sr, creq);
creq 220 drivers/crypto/cavium/nitrox/nitrox_reqmgr.c ret = dma_map_outbufs(sr, creq);
creq 210 drivers/crypto/cavium/nitrox/nitrox_skcipher.c struct se_crypto_request *creq;
creq 213 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq = &nkreq->creq;
creq 214 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->flags = skreq->base.flags;
creq 215 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gfp = (skreq->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
creq 219 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->ctrl.value = 0;
creq 220 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->opcode = FLEXI_CRYPTO_ENCRYPT_HMAC;
creq 221 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->ctrl.s.arg = (enc ? ENCRYPT : DECRYPT);
creq 223 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gph.param0 = cpu_to_be16(skreq->cryptlen);
creq 224 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gph.param1 = 0;
creq 226 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gph.param2 = cpu_to_be16(ivsize);
creq 227 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->gph.param3 = 0;
creq 229 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->ctx_handle = nctx->u.ctx_handle;
creq 230 drivers/crypto/cavium/nitrox/nitrox_skcipher.c creq->ctrl.s.ctxl = sizeof(struct flexi_crypto_context);
creq 243 drivers/crypto/cavium/nitrox/nitrox_skcipher.c return nitrox_process_se_request(nctx->ndev, creq,
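
The nitrox entries derive the allocation mode for their request buffers from CRYPTO_TFM_REQ_MAY_SLEEP. The same test as a stand-alone helper, with a hypothetical name:

#include <linux/crypto.h>
#include <linux/gfp.h>
#include <linux/slab.h>

/* Pick a GFP mode based on whether the crypto request may sleep,
 * as nitrox_aead.c / nitrox_skcipher.c do above. */
static gfp_t my_req_gfp(u32 crypto_req_flags)
{
	return (crypto_req_flags & CRYPTO_TFM_REQ_MAY_SLEEP) ?
	       GFP_KERNEL : GFP_ATOMIC;
}

/* e.g. scratch = kzalloc(len, my_req_gfp(req->base.flags)); */
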
creq 47 drivers/crypto/ccree/cc_request_mgr.c struct cc_crypto_req creq;
creq 353 drivers/crypto/ccree/cc_request_mgr.c struct cc_crypto_req *creq;
creq 367 drivers/crypto/ccree/cc_request_mgr.c creq = &bli->creq;
creq 368 drivers/crypto/ccree/cc_request_mgr.c req = creq->user_arg;
creq 375 drivers/crypto/ccree/cc_request_mgr.c creq->user_cb(dev, req, -EINPROGRESS);
creq 392 drivers/crypto/ccree/cc_request_mgr.c rc = cc_do_send_request(drvdata, &bli->creq, bli->desc,
creq 399 drivers/crypto/ccree/cc_request_mgr.c creq->user_cb(dev, req, rc);
creq 446 drivers/crypto/ccree/cc_request_mgr.c memcpy(&bli->creq, cc_req, sizeof(*cc_req));
creq 1539 drivers/crypto/inside-secure/safexcel_cipher.c struct safexcel_cipher_req *creq = aead_request_ctx(req);
creq 1541 drivers/crypto/inside-secure/safexcel_cipher.c return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
creq 1546 drivers/crypto/inside-secure/safexcel_cipher.c struct safexcel_cipher_req *creq = aead_request_ctx(req);
creq 1548 drivers/crypto/inside-secure/safexcel_cipher.c return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
creq 2263 drivers/crypto/inside-secure/safexcel_cipher.c struct safexcel_cipher_req *creq = aead_request_ctx(req);
creq 2268 drivers/crypto/inside-secure/safexcel_cipher.c return safexcel_queue_req(&req->base, creq, SAFEXCEL_ENCRYPT);
creq 2273 drivers/crypto/inside-secure/safexcel_cipher.c struct safexcel_cipher_req *creq = aead_request_ctx(req);
creq 2278 drivers/crypto/inside-secure/safexcel_cipher.c return safexcel_queue_req(&req->base, creq, SAFEXCEL_DECRYPT);
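
The safexcel_cipher.c lines above rely on the per-request context pattern: the transform reserves space at init time so that aead_request_ctx() later hands back driver-private state. A hedged sketch with made-up my_* names (only the crypto API calls are the kernel's own):

#include <crypto/internal/aead.h>

/* Hypothetical per-request state, analogous to struct safexcel_cipher_req. */
struct my_cipher_req {
	bool needs_inv;
	int nr_src;
};

static int my_aead_cra_init(struct crypto_aead *tfm)
{
	/* Reserve room so aead_request_ctx() returns a my_cipher_req. */
	crypto_aead_set_reqsize(tfm, sizeof(struct my_cipher_req));
	return 0;
}

static int my_aead_encrypt(struct aead_request *req)
{
	struct my_cipher_req *creq = aead_request_ctx(req);

	creq->needs_inv = false;
	/* ... build descriptors and queue the request to the engine ... */
	return -EINPROGRESS;
}
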
creq 170 drivers/crypto/marvell/cesa.c struct mv_cesa_req *creq)
creq 173 drivers/crypto/marvell/cesa.c struct mv_cesa_engine *engine = creq->engine;
creq 177 drivers/crypto/marvell/cesa.c if ((mv_cesa_req_get_type(creq) == CESA_DMA_REQ) &&
creq 179 drivers/crypto/marvell/cesa.c mv_cesa_tdma_chain(engine, creq);
creq 722 drivers/crypto/marvell/cesa.h struct mv_cesa_req *creq);
creq 59 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 62 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
creq 64 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 67 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 70 drivers/crypto/marvell/cipher.c mv_cesa_dma_cleanup(&creq->base);
creq 75 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 77 drivers/crypto/marvell/cipher.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 83 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 84 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_std_req *sreq = &creq->std;
creq 85 drivers/crypto/marvell/cipher.c struct mv_cesa_engine *engine = creq->base.engine;
creq 92 drivers/crypto/marvell/cipher.c len = sg_pcopy_to_buffer(req->src, creq->src_nents,
creq 117 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 118 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_std_req *sreq = &creq->std;
creq 119 drivers/crypto/marvell/cipher.c struct mv_cesa_engine *engine = creq->base.engine;
creq 122 drivers/crypto/marvell/cipher.c len = sg_pcopy_from_buffer(req->dst, creq->dst_nents,
creq 137 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
creq 138 drivers/crypto/marvell/cipher.c struct mv_cesa_req *basereq = &creq->base;
creq 149 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
creq 151 drivers/crypto/marvell/cipher.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 152 drivers/crypto/marvell/cipher.c mv_cesa_dma_step(&creq->base);
creq 160 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 161 drivers/crypto/marvell/cipher.c struct mv_cesa_req *basereq = &creq->base;
creq 169 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 170 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_std_req *sreq = &creq->std;
creq 180 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
creq 181 drivers/crypto/marvell/cipher.c creq->base.engine = engine;
creq 183 drivers/crypto/marvell/cipher.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 201 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(skreq);
creq 202 drivers/crypto/marvell/cipher.c struct mv_cesa_engine *engine = creq->base.engine;
creq 208 drivers/crypto/marvell/cipher.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ) {
creq 211 drivers/crypto/marvell/cipher.c basereq = &creq->base;
creq 305 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 308 drivers/crypto/marvell/cipher.c struct mv_cesa_req *basereq = &creq->base;
creq 317 drivers/crypto/marvell/cipher.c ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 322 drivers/crypto/marvell/cipher.c ret = dma_map_sg(cesa_dev->dev, req->dst, creq->dst_nents,
creq 329 drivers/crypto/marvell/cipher.c ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 384 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->dst, creq->dst_nents,
creq 388 drivers/crypto/marvell/cipher.c dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 398 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 399 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_std_req *sreq = &creq->std;
creq 400 drivers/crypto/marvell/cipher.c struct mv_cesa_req *basereq = &creq->base;
creq 413 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 421 drivers/crypto/marvell/cipher.c creq->src_nents = sg_nents_for_len(req->src, req->cryptlen);
creq 422 drivers/crypto/marvell/cipher.c if (creq->src_nents < 0) {
creq 424 drivers/crypto/marvell/cipher.c return creq->src_nents;
creq 426 drivers/crypto/marvell/cipher.c creq->dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
creq 427 drivers/crypto/marvell/cipher.c if (creq->dst_nents < 0) {
creq 429 drivers/crypto/marvell/cipher.c return creq->dst_nents;
creq 447 drivers/crypto/marvell/cipher.c struct mv_cesa_skcipher_req *creq = skcipher_request_ctx(req);
creq 457 drivers/crypto/marvell/cipher.c ret = mv_cesa_queue_req(&req->base, &creq->base);
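
The drivers/crypto/marvell/cipher.c lines above count the scatterlist entries covering the request and DMA-map src and dst before building the TDMA chain. A simplified sketch of that mapping, assuming a hypothetical my_map_req() and ignoring the in-place case (src == dst mapped DMA_BIDIRECTIONAL) that the driver also handles:

#include <linux/dma-mapping.h>
#include <linux/scatterlist.h>
#include <crypto/skcipher.h>

static int my_map_req(struct device *dev, struct skcipher_request *req,
		      int *src_nents, int *dst_nents)
{
	/* Count only the entries needed to cover cryptlen. */
	*src_nents = sg_nents_for_len(req->src, req->cryptlen);
	if (*src_nents < 0)
		return *src_nents;

	*dst_nents = sg_nents_for_len(req->dst, req->cryptlen);
	if (*dst_nents < 0)
		return *dst_nents;

	/* dma_map_sg() returns 0 on failure, else the mapped entry count. */
	if (!dma_map_sg(dev, req->src, *src_nents, DMA_TO_DEVICE))
		return -ENOMEM;

	if (!dma_map_sg(dev, req->dst, *dst_nents, DMA_FROM_DEVICE)) {
		dma_unmap_sg(dev, req->src, *src_nents, DMA_TO_DEVICE);
		return -ENOMEM;
	}

	return 0;
}
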
creq 27 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 28 drivers/crypto/marvell/hash.c unsigned int len = req->nbytes + creq->cache_ptr;
creq 30 drivers/crypto/marvell/hash.c if (!creq->last_req)
creq 35 drivers/crypto/marvell/hash.c iter->src.op_offset = creq->cache_ptr;
creq 93 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 95 drivers/crypto/marvell/hash.c mv_cesa_ahash_dma_free_padding(&creq->req.dma);
creq 100 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 102 drivers/crypto/marvell/hash.c dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);
creq 103 drivers/crypto/marvell/hash.c mv_cesa_ahash_dma_free_cache(&creq->req.dma);
creq 104 drivers/crypto/marvell/hash.c mv_cesa_dma_cleanup(&creq->base);
creq 109 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 111 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 117 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 119 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 123 drivers/crypto/marvell/hash.c static int mv_cesa_ahash_pad_len(struct mv_cesa_ahash_req *creq)
creq 127 drivers/crypto/marvell/hash.c index = creq->len & CESA_HASH_BLOCK_SIZE_MSK;
creq 133 drivers/crypto/marvell/hash.c static int mv_cesa_ahash_pad_req(struct mv_cesa_ahash_req *creq, u8 *buf)
creq 139 drivers/crypto/marvell/hash.c padlen = mv_cesa_ahash_pad_len(creq);
creq 142 drivers/crypto/marvell/hash.c if (creq->algo_le) {
creq 143 drivers/crypto/marvell/hash.c __le64 bits = cpu_to_le64(creq->len << 3);
creq 146 drivers/crypto/marvell/hash.c __be64 bits = cpu_to_be64(creq->len << 3);
creq 155 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 156 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
creq 157 drivers/crypto/marvell/hash.c struct mv_cesa_engine *engine = creq->base.engine;
creq 165 drivers/crypto/marvell/hash.c mv_cesa_adjust_op(engine, &creq->op_tmpl);
creq 166 drivers/crypto/marvell/hash.c memcpy_toio(engine->sram, &creq->op_tmpl, sizeof(creq->op_tmpl));
creq 171 drivers/crypto/marvell/hash.c writel_relaxed(creq->state[i], engine->regs + CESA_IVDIG(i));
creq 174 drivers/crypto/marvell/hash.c if (creq->cache_ptr)
creq 176 drivers/crypto/marvell/hash.c creq->cache, creq->cache_ptr);
creq 178 drivers/crypto/marvell/hash.c len = min_t(size_t, req->nbytes + creq->cache_ptr - sreq->offset,
creq 181 drivers/crypto/marvell/hash.c if (!creq->last_req) {
creq 186 drivers/crypto/marvell/hash.c if (len - creq->cache_ptr)
creq 187 drivers/crypto/marvell/hash.c sreq->offset += sg_pcopy_to_buffer(req->src, creq->src_nents,
creq 190 drivers/crypto/marvell/hash.c creq->cache_ptr,
creq 191 drivers/crypto/marvell/hash.c len - creq->cache_ptr,
creq 194 drivers/crypto/marvell/hash.c op = &creq->op_tmpl;
creq 198 drivers/crypto/marvell/hash.c if (creq->last_req && sreq->offset == req->nbytes &&
creq 199 drivers/crypto/marvell/hash.c creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX) {
creq 209 drivers/crypto/marvell/hash.c creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX) {
creq 210 drivers/crypto/marvell/hash.c mv_cesa_set_mac_op_total_len(op, creq->len);
creq 212 drivers/crypto/marvell/hash.c int trailerlen = mv_cesa_ahash_pad_len(creq) + 8;
creq 217 drivers/crypto/marvell/hash.c memcpy_fromio(creq->cache,
creq 222 drivers/crypto/marvell/hash.c len += mv_cesa_ahash_pad_req(creq,
creq 244 drivers/crypto/marvell/hash.c creq->cache_ptr = new_cache_ptr;
creq 255 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 256 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
creq 258 drivers/crypto/marvell/hash.c if (sreq->offset < (req->nbytes - creq->cache_ptr))
creq 266 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 267 drivers/crypto/marvell/hash.c struct mv_cesa_req *basereq = &creq->base;
creq 274 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 275 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_std_req *sreq = &creq->req.std;
creq 282 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 283 drivers/crypto/marvell/hash.c struct mv_cesa_req *base = &creq->base;
creq 291 drivers/crypto/marvell/hash.c for (i = 0; i < ARRAY_SIZE(creq->state); i++)
creq 292 drivers/crypto/marvell/hash.c writel_relaxed(creq->state[i], engine->regs +
creq 302 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq);
creq 304 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 313 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq);
creq 315 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 316 drivers/crypto/marvell/hash.c return mv_cesa_dma_process(&creq->base, status);
creq 324 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq);
creq 325 drivers/crypto/marvell/hash.c struct mv_cesa_engine *engine = creq->base.engine;
creq 331 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ &&
creq 332 drivers/crypto/marvell/hash.c (creq->base.chain.last->flags & CESA_TDMA_TYPE_MSK) == CESA_TDMA_RESULT) {
creq 339 drivers/crypto/marvell/hash.c data = creq->base.chain.last->op->ctx.hash.hash;
creq 341 drivers/crypto/marvell/hash.c creq->state[i] = cpu_to_le32(data[i]);
creq 346 drivers/crypto/marvell/hash.c creq->state[i] = readl_relaxed(engine->regs +
creq 348 drivers/crypto/marvell/hash.c if (creq->last_req) {
creq 353 drivers/crypto/marvell/hash.c if (creq->algo_le) {
creq 357 drivers/crypto/marvell/hash.c result[i] = cpu_to_le32(creq->state[i]);
creq 362 drivers/crypto/marvell/hash.c result[i] = cpu_to_be32(creq->state[i]);
creq 374 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq);
creq 376 drivers/crypto/marvell/hash.c creq->base.engine = engine;
creq 378 drivers/crypto/marvell/hash.c if (mv_cesa_req_get_type(&creq->base) == CESA_DMA_REQ)
creq 387 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(ahashreq);
creq 389 drivers/crypto/marvell/hash.c if (creq->last_req)
creq 394 drivers/crypto/marvell/hash.c if (creq->cache_ptr)
creq 395 drivers/crypto/marvell/hash.c sg_pcopy_to_buffer(ahashreq->src, creq->src_nents,
creq 396 drivers/crypto/marvell/hash.c creq->cache,
creq 397 drivers/crypto/marvell/hash.c creq->cache_ptr,
creq 398 drivers/crypto/marvell/hash.c ahashreq->nbytes - creq->cache_ptr);
creq 411 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 413 drivers/crypto/marvell/hash.c memset(creq, 0, sizeof(*creq));
creq 421 drivers/crypto/marvell/hash.c creq->op_tmpl = *tmpl;
creq 422 drivers/crypto/marvell/hash.c creq->len = 0;
creq 423 drivers/crypto/marvell/hash.c creq->algo_le = algo_le;
creq 439 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 442 drivers/crypto/marvell/hash.c if (creq->cache_ptr + req->nbytes < CESA_MAX_HASH_BLOCK_SIZE && !creq->last_req) {
creq 448 drivers/crypto/marvell/hash.c sg_pcopy_to_buffer(req->src, creq->src_nents,
creq 449 drivers/crypto/marvell/hash.c creq->cache + creq->cache_ptr,
creq 452 drivers/crypto/marvell/hash.c creq->cache_ptr += req->nbytes;
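
mv_cesa_ahash_pad_req() (drivers/crypto/marvell/hash.c lines 123-146 above) emits standard Merkle-Damgard padding: a 0x80 byte, zeroes up to the last 8 bytes of a block, then the message length in bits, little-endian for MD5 and big-endian for SHA-1/SHA-256. A generic sketch for 64-byte block algorithms; the helper name is made up:

#include <linux/types.h>
#include <linux/string.h>
#include <asm/byteorder.h>

static unsigned int my_hash_pad(u8 *buf, u64 msg_len, bool le)
{
	unsigned int index = msg_len & 63;
	unsigned int padlen = (index < 56) ? (56 - index) : (64 + 56 - index);

	buf[0] = 0x80;
	memset(buf + 1, 0, padlen - 1);

	if (le) {
		__le64 bits = cpu_to_le64(msg_len << 3);

		memcpy(buf + padlen, &bits, sizeof(bits));
	} else {
		__be64 bits = cpu_to_be64(msg_len << 3);

		memcpy(buf + padlen, &bits, sizeof(bits));
	}

	/* total trailer size: padding plus the 8-byte length field */
	return padlen + 8;
}
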
creq 488 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq,
creq 491 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma;
creq 494 drivers/crypto/marvell/hash.c if (!creq->cache_ptr)
creq 501 drivers/crypto/marvell/hash.c memcpy(ahashdreq->cache, creq->cache, creq->cache_ptr);
creq 506 drivers/crypto/marvell/hash.c creq->cache_ptr,
creq 514 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq,
creq 517 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_dma_req *ahashdreq = &creq->req.dma;
creq 526 drivers/crypto/marvell/hash.c if (creq->len <= CESA_SA_DESC_MAC_SRC_TOTAL_LEN_MAX && frag_len) {
creq 527 drivers/crypto/marvell/hash.c op = mv_cesa_dma_add_frag(chain, &creq->op_tmpl, frag_len,
creq 532 drivers/crypto/marvell/hash.c mv_cesa_set_mac_op_total_len(op, creq->len);
creq 556 drivers/crypto/marvell/hash.c trailerlen = mv_cesa_ahash_pad_req(creq, ahashdreq->padding);
creq 569 drivers/crypto/marvell/hash.c op = mv_cesa_dma_add_frag(chain, &creq->op_tmpl, frag_len + len,
creq 590 drivers/crypto/marvell/hash.c return mv_cesa_dma_add_frag(chain, &creq->op_tmpl, trailerlen - padoff,
creq 596 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 599 drivers/crypto/marvell/hash.c struct mv_cesa_req *basereq = &creq->base;
creq 610 drivers/crypto/marvell/hash.c if (!mv_cesa_mac_op_is_first_frag(&creq->op_tmpl))
creq 613 drivers/crypto/marvell/hash.c if (creq->src_nents) {
creq 614 drivers/crypto/marvell/hash.c ret = dma_map_sg(cesa_dev->dev, req->src, creq->src_nents,
creq 629 drivers/crypto/marvell/hash.c ret = mv_cesa_ahash_dma_add_cache(&basereq->chain, creq, flags);
creq 651 drivers/crypto/marvell/hash.c op = mv_cesa_dma_add_frag(&basereq->chain, &creq->op_tmpl,
creq 668 drivers/crypto/marvell/hash.c if (creq->last_req)
creq 669 drivers/crypto/marvell/hash.c op = mv_cesa_ahash_dma_last_req(&basereq->chain, &iter, creq,
creq 672 drivers/crypto/marvell/hash.c op = mv_cesa_dma_add_frag(&basereq->chain, &creq->op_tmpl,
creq 695 drivers/crypto/marvell/hash.c if (!creq->last_req)
creq 696 drivers/crypto/marvell/hash.c creq->cache_ptr = req->nbytes + creq->cache_ptr -
creq 699 drivers/crypto/marvell/hash.c creq->cache_ptr = 0;
creq 719 drivers/crypto/marvell/hash.c dma_unmap_sg(cesa_dev->dev, req->src, creq->src_nents, DMA_TO_DEVICE);
creq 729 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 731 drivers/crypto/marvell/hash.c creq->src_nents = sg_nents_for_len(req->src, req->nbytes);
creq 732 drivers/crypto/marvell/hash.c if (creq->src_nents < 0) {
creq 734 drivers/crypto/marvell/hash.c return creq->src_nents;
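
The caching logic visible in mv_cesa_ahash_cache_req() (hash.c lines 439-452 above) and mv_cesa_ahash_dma_add_cache() (lines 488-506) buffers input shorter than a block in the request context so the engine only ever sees block-aligned data. A hedged sketch of that idea, with 64 standing in for the driver's CESA_MAX_HASH_BLOCK_SIZE and a hypothetical helper name:

#include <crypto/hash.h>
#include <linux/scatterlist.h>

static bool my_ahash_try_cache(struct ahash_request *req, u8 *cache,
			       unsigned int *cache_ptr, bool last_req)
{
	if (*cache_ptr + req->nbytes >= 64 || last_req)
		return false;		/* needs real processing */

	/* Copy the whole (short) update into the software cache. */
	sg_pcopy_to_buffer(req->src, sg_nents(req->src),
			   cache + *cache_ptr, req->nbytes, 0);
	*cache_ptr += req->nbytes;
	return true;			/* fully buffered, nothing to submit */
}
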
creq 750 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 765 drivers/crypto/marvell/hash.c ret = mv_cesa_queue_req(&req->base, &creq->base);
creq 775 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 777 drivers/crypto/marvell/hash.c creq->len += req->nbytes;
creq 784 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 785 drivers/crypto/marvell/hash.c struct mv_cesa_op_ctx *tmpl = &creq->op_tmpl;
creq 787 drivers/crypto/marvell/hash.c mv_cesa_set_mac_op_total_len(tmpl, creq->len);
creq 788 drivers/crypto/marvell/hash.c creq->last_req = true;
creq 796 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 797 drivers/crypto/marvell/hash.c struct mv_cesa_op_ctx *tmpl = &creq->op_tmpl;
creq 799 drivers/crypto/marvell/hash.c creq->len += req->nbytes;
creq 800 drivers/crypto/marvell/hash.c mv_cesa_set_mac_op_total_len(tmpl, creq->len);
creq 801 drivers/crypto/marvell/hash.c creq->last_req = true;
creq 810 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 816 drivers/crypto/marvell/hash.c *len = creq->len;
creq 817 drivers/crypto/marvell/hash.c memcpy(hash, creq->state, digsize);
creq 819 drivers/crypto/marvell/hash.c memcpy(cache, creq->cache, creq->cache_ptr);
creq 828 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 840 drivers/crypto/marvell/hash.c mv_cesa_update_op_cfg(&creq->op_tmpl,
creq 844 drivers/crypto/marvell/hash.c creq->len = len;
creq 845 drivers/crypto/marvell/hash.c memcpy(creq->state, hash, digsize);
creq 846 drivers/crypto/marvell/hash.c creq->cache_ptr = 0;
creq 852 drivers/crypto/marvell/hash.c memcpy(creq->cache, cache, cache_ptr);
creq 853 drivers/crypto/marvell/hash.c creq->cache_ptr = cache_ptr;
creq 860 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 867 drivers/crypto/marvell/hash.c creq->state[0] = MD5_H0;
creq 868 drivers/crypto/marvell/hash.c creq->state[1] = MD5_H1;
creq 869 drivers/crypto/marvell/hash.c creq->state[2] = MD5_H2;
creq 870 drivers/crypto/marvell/hash.c creq->state[3] = MD5_H3;
creq 929 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 936 drivers/crypto/marvell/hash.c creq->state[0] = SHA1_H0;
creq 937 drivers/crypto/marvell/hash.c creq->state[1] = SHA1_H1;
creq 938 drivers/crypto/marvell/hash.c creq->state[2] = SHA1_H2;
creq 939 drivers/crypto/marvell/hash.c creq->state[3] = SHA1_H3;
creq 940 drivers/crypto/marvell/hash.c creq->state[4] = SHA1_H4;
creq 999 drivers/crypto/marvell/hash.c struct mv_cesa_ahash_req *creq = ahash_request_ctx(req);
creq 1006 drivers/crypto/marvell/hash.c creq->state[0] = SHA256_H0;
creq 1007 drivers/crypto/marvell/hash.c creq->state[1] = SHA256_H1;
creq 1008 drivers/crypto/marvell/hash.c creq->state[2] = SHA256_H2;
creq 1009 drivers/crypto/marvell/hash.c creq->state[3] = SHA256_H3;
creq 1010 drivers/crypto/marvell/hash.c creq->state[4] = SHA256_H4;
creq 1011 drivers/crypto/marvell/hash.c creq->state[5] = SHA256_H5;
creq 1012 drivers/crypto/marvell/hash.c creq->state[6] = SHA256_H6;
creq 1013 drivers/crypto/marvell/hash.c creq->state[7] = SHA256_H7;
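
The update/final/finup/export/import entry points listed above are reached through the generic ahash API. A minimal, hedged caller-side sketch (the "sha256" algorithm name and the one-shot digest flow are illustrative; the input buffer must not live on the stack, since it is wrapped in a scatterlist):

#include <crypto/hash.h>
#include <linux/scatterlist.h>
#include <linux/err.h>

static int my_sha256(const u8 *data, unsigned int len, u8 *out)
{
	struct crypto_ahash *tfm;
	struct ahash_request *req;
	struct scatterlist sg;
	DECLARE_CRYPTO_WAIT(wait);
	int err;

	tfm = crypto_alloc_ahash("sha256", 0, 0);
	if (IS_ERR(tfm))
		return PTR_ERR(tfm);

	req = ahash_request_alloc(tfm, GFP_KERNEL);
	if (!req) {
		crypto_free_ahash(tfm);
		return -ENOMEM;
	}

	sg_init_one(&sg, data, len);
	ahash_request_set_callback(req, CRYPTO_TFM_REQ_MAY_BACKLOG,
				   crypto_req_done, &wait);
	ahash_request_set_crypt(req, &sg, out, len);

	/* Wait for the (possibly asynchronous) hardware implementation. */
	err = crypto_wait_req(crypto_ahash_digest(req), &wait);

	ahash_request_free(req);
	crypto_free_ahash(tfm);
	return err;
}
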
creq 1328 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq = rep_ext.creq.base;
creq 1733 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.svga3d_flags = SVGA3D_FLAGS_LOWER_32(srf->flags);
creq 1734 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.format = srf->format;
creq 1735 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.mip_levels = srf->mip_levels[0];
creq 1736 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.drm_surface_flags = 0;
creq 1737 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.multisample_count = srf->multisample_count;
creq 1738 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.autogen_filter = srf->autogen_filter;
creq 1739 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.array_size = srf->array_size;
creq 1740 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.buffer_handle = backup_handle;
creq 1741 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.base.base_size = srf->base_size;
creq 1749 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.version = drm_vmw_gb_surface_v1;
creq 1750 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.svga3d_flags_upper_32_bits =
creq 1752 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.multisample_pattern = srf->multisample_pattern;
creq 1753 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.quality_level = srf->quality_level;
creq 1754 drivers/gpu/drm/vmwgfx/vmwgfx_surface.c rep->creq.must_be_zero = 0;
creq 1388 drivers/infiniband/hw/bnxt_re/main.c pg_map = rdev->rcfw.creq.pbl[PBL_LVL_0].pg_map_arr;
creq 1389 drivers/infiniband/hw/bnxt_re/main.c pages = rdev->rcfw.creq.pbl[rdev->rcfw.creq.level].pg_count;
creq 369 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c struct bnxt_qplib_hwq *creq = &rcfw->creq;
creq 376 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c spin_lock_irqsave(&creq->lock, flags);
creq 377 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c raw_cons = creq->cons;
creq 379 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c sw_cons = HWQ_CMP(raw_cons, creq);
creq 380 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c creq_ptr = (struct creq_base **)creq->pbl_ptr;
creq 382 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c if (!CREQ_CMP_VALID(creqe, raw_cons, creq->max_elements))
creq 415 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c if (creq->cons != raw_cons) {
creq 416 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c creq->cons = raw_cons;
creq 418 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c raw_cons, creq->max_elements,
creq 421 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c spin_unlock_irqrestore(&creq->lock, flags);
creq 427 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c struct bnxt_qplib_hwq *creq = &rcfw->creq;
creq 432 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c sw_cons = HWQ_CMP(creq->cons, creq);
creq 433 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c creq_ptr = (struct creq_base **)rcfw->creq.pbl_ptr;
creq 564 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c bnxt_qplib_free_hwq(rcfw->pdev, &rcfw->creq);
creq 576 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c rcfw->creq.max_elements = BNXT_QPLIB_CREQE_MAX_CNT;
creq 578 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c if (bnxt_qplib_alloc_init_hwq(rcfw->pdev, &rcfw->creq, NULL,
creq 579 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c &rcfw->creq.max_elements,
creq 627 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c bnxt_qplib_ring_creq_db(rcfw->creq_bar_reg_iomem, rcfw->creq.cons,
creq 628 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c rcfw->creq.max_elements, rcfw->creq_ring_id,
creq 684 drivers/infiniband/hw/bnxt_re/qplib_rcfw.c rcfw->creq.cons, rcfw->creq.max_elements,
creq 256 drivers/infiniband/hw/bnxt_re/qplib_rcfw.h struct bnxt_qplib_hwq creq;
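
The vmwgfx_surface.c lines above return a 64-bit SVGA3D flags value through two 32-bit uapi fields (SVGA3D_FLAGS_LOWER_32() plus svga3d_flags_upper_32_bits). The split and its inverse are just shifts and masks; the my_* helpers below are illustrative, not the driver's macros:

#include <linux/types.h>

static inline u32 my_flags_lower_32(u64 flags)
{
	return (u32)flags;		/* low 32 bits */
}

static inline u32 my_flags_upper_32(u64 flags)
{
	return (u32)(flags >> 32);	/* high 32 bits */
}

static inline u64 my_flags_combine(u32 upper, u32 lower)
{
	return ((u64)upper << 32) | lower;
}
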
creq 123 drivers/net/can/pch_can.c u32 creq;
creq 285 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[dir].creq, buff_num);
creq 301 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[dir].creq, buff_num);
creq 345 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i);
creq 355 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i);
creq 379 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, i);
creq 384 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, i);
creq 401 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, i);
creq 451 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, mask);
creq 468 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, mask);
creq 594 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, obj_id);
creq 616 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, obj_id);
creq 646 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, obj_num);
creq 717 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, int_stat);
creq 933 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[1].creq, tx_obj_no);
creq 989 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[dir].creq, buff_num);
creq 1004 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, buffer_num);
creq 1013 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, buffer_num);
creq 1021 drivers/net/can/pch_can.c pch_can_rw_msg_obj(&priv->regs->ifregs[0].creq, buffer_num);
creq 1227 drivers/net/wireless/marvell/libertas/cfg.c struct cfg80211_scan_request *creq = NULL;
creq 1231 drivers/net/wireless/marvell/libertas/cfg.c creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) +
creq 1234 drivers/net/wireless/marvell/libertas/cfg.c if (!creq)
creq 1238 drivers/net/wireless/marvell/libertas/cfg.c creq->ssids = (void *)&creq->channels[n_channels];
creq 1239 drivers/net/wireless/marvell/libertas/cfg.c creq->n_channels = n_channels;
creq 1240 drivers/net/wireless/marvell/libertas/cfg.c creq->n_ssids = 1;
creq 1256 drivers/net/wireless/marvell/libertas/cfg.c creq->channels[i] = &wiphy->bands[band]->channels[j];
creq 1262 drivers/net/wireless/marvell/libertas/cfg.c creq->n_channels = i;
creq 1265 drivers/net/wireless/marvell/libertas/cfg.c memcpy(creq->ssids[0].ssid, sme->ssid, sme->ssid_len);
creq 1266 drivers/net/wireless/marvell/libertas/cfg.c creq->ssids[0].ssid_len = sme->ssid_len;
creq 1269 drivers/net/wireless/marvell/libertas/cfg.c kfree(creq);
creq 1270 drivers/net/wireless/marvell/libertas/cfg.c creq = NULL;
creq 1273 drivers/net/wireless/marvell/libertas/cfg.c return creq;
creq 1288 drivers/net/wireless/marvell/libertas/cfg.c struct cfg80211_scan_request *creq;
creq 1299 drivers/net/wireless/marvell/libertas/cfg.c creq = _new_connect_scan_req(wiphy, sme);
creq 1300 drivers/net/wireless/marvell/libertas/cfg.c if (!creq) {
creq 1306 drivers/net/wireless/marvell/libertas/cfg.c _internal_start_scan(priv, true, creq);
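
The libertas cfg.c lines above (and the net/wireless/scan.c lines further down) build a cfg80211_scan_request by allocating the struct, one SSID and the channel pointer array as a single block, then pointing ->ssids just past the trailing channels[] array. A hedged sketch of that layout (function name and GFP_KERNEL chosen for illustration):

#include <net/cfg80211.h>
#include <linux/slab.h>

static struct cfg80211_scan_request *my_alloc_scan_req(int n_channels)
{
	struct cfg80211_scan_request *creq;

	creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) +
		       n_channels * sizeof(void *), GFP_KERNEL);
	if (!creq)
		return NULL;

	/* The single SSID lives right after channels[n_channels]. */
	creq->ssids = (void *)&creq->channels[n_channels];
	creq->n_channels = n_channels;
	creq->n_ssids = 1;

	return creq;	/* caller fills channels[] and ssids[0], then kfree()s */
}
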
creq 3325 drivers/usb/gadget/function/f_fs.c const struct usb_ctrlrequest *creq)
creq 3334 drivers/usb/gadget/function/f_fs.c pr_vdebug("creq->bRequestType = %02x\n", creq->bRequestType);
creq 3335 drivers/usb/gadget/function/f_fs.c pr_vdebug("creq->bRequest = %02x\n", creq->bRequest);
creq 3336 drivers/usb/gadget/function/f_fs.c pr_vdebug("creq->wValue = %04x\n", le16_to_cpu(creq->wValue));
creq 3337 drivers/usb/gadget/function/f_fs.c pr_vdebug("creq->wIndex = %04x\n", le16_to_cpu(creq->wIndex));
creq 3338 drivers/usb/gadget/function/f_fs.c pr_vdebug("creq->wLength = %04x\n", le16_to_cpu(creq->wLength));
creq 3353 drivers/usb/gadget/function/f_fs.c switch (creq->bRequestType & USB_RECIP_MASK) {
creq 3355 drivers/usb/gadget/function/f_fs.c ret = ffs_func_revmap_intf(func, le16_to_cpu(creq->wIndex));
creq 3361 drivers/usb/gadget/function/f_fs.c ret = ffs_func_revmap_ep(func, le16_to_cpu(creq->wIndex));
creq 3370 drivers/usb/gadget/function/f_fs.c ret = le16_to_cpu(creq->wIndex);
creq 3376 drivers/usb/gadget/function/f_fs.c ffs->ev.setup = *creq;
creq 3381 drivers/usb/gadget/function/f_fs.c return creq->wLength == 0 ? USB_GADGET_DELAYED_STATUS : 0;
creq 3385 drivers/usb/gadget/function/f_fs.c const struct usb_ctrlrequest *creq,
creq 3393 drivers/usb/gadget/function/f_fs.c switch (creq->bRequestType & USB_RECIP_MASK) {
creq 3396 drivers/usb/gadget/function/f_fs.c le16_to_cpu(creq->wIndex)) >= 0);
creq 3399 drivers/usb/gadget/function/f_fs.c le16_to_cpu(creq->wIndex)) >= 0);
creq 991 include/uapi/drm/vmwgfx_drm.h struct drm_vmw_gb_surface_create_req creq;
creq 1196 include/uapi/drm/vmwgfx_drm.h struct drm_vmw_gb_surface_create_ext_req creq;
creq 2122 net/wireless/scan.c struct cfg80211_scan_request *creq = NULL;
creq 2150 net/wireless/scan.c creq = kzalloc(sizeof(*creq) + sizeof(struct cfg80211_ssid) +
creq 2153 net/wireless/scan.c if (!creq) {
creq 2158 net/wireless/scan.c creq->wiphy = wiphy;
creq 2159 net/wireless/scan.c creq->wdev = dev->ieee80211_ptr;
creq 2161 net/wireless/scan.c creq->ssids = (void *)&creq->channels[n_channels];
creq 2162 net/wireless/scan.c creq->n_channels = n_channels;
creq 2163 net/wireless/scan.c creq->n_ssids = 1;
creq 2164 net/wireless/scan.c creq->scan_start = jiffies;
creq 2200 net/wireless/scan.c creq->channels[i] = &wiphy->bands[band]->channels[j];
creq 2212 net/wireless/scan.c creq->n_channels = i;
creq 2221 net/wireless/scan.c memcpy(creq->ssids[0].ssid, wreq->essid, wreq->essid_len);
creq 2222 net/wireless/scan.c creq->ssids[0].ssid_len = wreq->essid_len;
creq 2225 net/wireless/scan.c creq->n_ssids = 0;
creq 2230 net/wireless/scan.c creq->rates[i] = (1 << wiphy->bands[i]->n_bitrates) - 1;
creq 2232 net/wireless/scan.c eth_broadcast_addr(creq->bssid);
creq 2234 net/wireless/scan.c rdev->scan_req = creq;
creq 2235 net/wireless/scan.c err = rdev_scan(rdev, creq);
creq 2242 net/wireless/scan.c creq = NULL;
creq 2246 net/wireless/scan.c kfree(creq);
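
The drivers/usb/gadget/function/f_fs.c lines above decode a SETUP packet; wValue/wIndex/wLength are little-endian on the wire, so every read goes through le16_to_cpu(), and the recipient is extracted from bRequestType. A minimal sketch of the same decoding (my_handle_setup is a made-up name):

#include <linux/usb/ch9.h>
#include <linux/errno.h>
#include <linux/types.h>
#include <asm/byteorder.h>

static int my_handle_setup(const struct usb_ctrlrequest *creq)
{
	u16 w_index = le16_to_cpu(creq->wIndex);
	u16 w_length = le16_to_cpu(creq->wLength);

	switch (creq->bRequestType & USB_RECIP_MASK) {
	case USB_RECIP_INTERFACE:
		/* w_index carries the interface number */
		break;
	case USB_RECIP_ENDPOINT:
		/* w_index carries the endpoint address */
		break;
	default:
		return -EOPNOTSUPP;
	}

	return w_length ? 1 : 0;	/* is a data stage expected? */
}
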