rctx 231 crypto/adiantum.c struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
rctx 251 crypto/adiantum.c poly1305_core_emit(&state, &rctx->header_hash);
rctx 260 crypto/adiantum.c struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
rctx 262 crypto/adiantum.c struct shash_desc *hash_desc = &rctx->u.hash_desc;
rctx 294 crypto/adiantum.c struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
rctx 300 crypto/adiantum.c if (!rctx->enc)
rctx 301 crypto/adiantum.c crypto_cipher_decrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
rctx 302 crypto/adiantum.c rctx->rbuf.bytes);
rctx 312 crypto/adiantum.c le128_add(&digest, &digest, &rctx->header_hash);
rctx 313 crypto/adiantum.c le128_sub(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
rctx 314 crypto/adiantum.c scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->dst,
rctx 334 crypto/adiantum.c struct adiantum_request_ctx *rctx = skcipher_request_ctx(req);
rctx 343 crypto/adiantum.c rctx->enc = enc;
rctx 354 crypto/adiantum.c le128_add(&digest, &digest, &rctx->header_hash);
rctx 355 crypto/adiantum.c scatterwalk_map_and_copy(&rctx->rbuf.bignum, req->src,
rctx 357 crypto/adiantum.c le128_add(&rctx->rbuf.bignum, &rctx->rbuf.bignum, &digest);
rctx 361 crypto/adiantum.c crypto_cipher_encrypt_one(tctx->blockcipher, rctx->rbuf.bytes,
rctx 362 crypto/adiantum.c rctx->rbuf.bytes);
rctx 367 crypto/adiantum.c rctx->rbuf.words[4] = cpu_to_le32(1);
rctx 368 crypto/adiantum.c rctx->rbuf.words[5] = 0;
rctx 369 crypto/adiantum.c rctx->rbuf.words[6] = 0;
rctx 370 crypto/adiantum.c rctx->rbuf.words[7] = 0;
rctx 385 crypto/adiantum.c skcipher_request_set_tfm(&rctx->u.streamcipher_req, tctx->streamcipher);
rctx 386 crypto/adiantum.c skcipher_request_set_crypt(&rctx->u.streamcipher_req, req->src,
rctx 387 crypto/adiantum.c req->dst, stream_len, &rctx->rbuf);
rctx 388 crypto/adiantum.c skcipher_request_set_callback(&rctx->u.streamcipher_req,
rctx 391 crypto/adiantum.c return crypto_skcipher_encrypt(&rctx->u.streamcipher_req) ?:
rctx 644 crypto/ccm.c struct crypto_rfc4309_req_ctx *rctx = aead_request_ctx(req);
rctx 645 crypto/ccm.c struct aead_request *subreq = &rctx->subreq;
rctx 661 crypto/ccm.c sg_init_table(rctx->src, 3);
rctx 662 crypto/ccm.c sg_set_buf(rctx->src, iv + 16, req->assoclen - 8);
rctx 663 crypto/ccm.c sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
rctx 664 crypto/ccm.c if (sg != rctx->src + 1)
rctx 665 crypto/ccm.c sg_chain(rctx->src, 2, sg);
rctx 668 crypto/ccm.c sg_init_table(rctx->dst, 3);
rctx 669 crypto/ccm.c sg_set_buf(rctx->dst, iv + 16, req->assoclen - 8);
rctx 670 crypto/ccm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
rctx 671 crypto/ccm.c if (sg != rctx->dst + 1)
rctx 672 crypto/ccm.c sg_chain(rctx->dst, 2, sg);
rctx 678 crypto/ccm.c aead_request_set_crypt(subreq, rctx->src,
rctx 679 crypto/ccm.c req->src == req->dst ? rctx->src : rctx->dst,
rctx 76 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 78 crypto/chacha20poly1305.c rctx->flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 99 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 100 crypto/chacha20poly1305.c u8 tag[sizeof(rctx->tag)];
rctx 103 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
rctx 105 crypto/chacha20poly1305.c if (crypto_memneq(tag, rctx->tag, sizeof(tag)))
rctx 112 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 114 crypto/chacha20poly1305.c scatterwalk_map_and_copy(rctx->tag, req->dst,
rctx 115 crypto/chacha20poly1305.c req->assoclen + rctx->cryptlen,
rctx 116 crypto/chacha20poly1305.c sizeof(rctx->tag), 1);
rctx 128 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 129 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
rctx 133 crypto/chacha20poly1305.c if (rctx->cryptlen == 0)
rctx 138 crypto/chacha20poly1305.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
rctx 141 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
rctx 143 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
rctx 147 crypto/chacha20poly1305.c rctx->cryptlen, creq->iv);
rctx 158 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 160 crypto/chacha20poly1305.c if (rctx->cryptlen == req->cryptlen) /* encrypting */
rctx 175 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 176 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 179 crypto/chacha20poly1305.c preq->tail.assoclen = cpu_to_le64(rctx->assoclen);
rctx 180 crypto/chacha20poly1305.c preq->tail.cryptlen = cpu_to_le64(rctx->cryptlen);
rctx 183 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 187 crypto/chacha20poly1305.c rctx->tag, sizeof(preq->tail));
rctx 204 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 205 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 209 crypto/chacha20poly1305.c padlen = -rctx->cryptlen % POLY1305_BLOCK_SIZE;
rctx 213 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 233 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 234 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 238 crypto/chacha20poly1305.c if (rctx->cryptlen == req->cryptlen) /* encrypting */
rctx 241 crypto/chacha20poly1305.c crypt = scatterwalk_ffwd(rctx->src, crypt, req->assoclen);
rctx 243 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 246 crypto/chacha20poly1305.c ahash_request_set_crypt(&preq->req, crypt, NULL, rctx->cryptlen);
rctx 263 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 264 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 268 crypto/chacha20poly1305.c padlen = -rctx->assoclen % POLY1305_BLOCK_SIZE;
rctx 272 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 292 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 293 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 296 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 299 crypto/chacha20poly1305.c ahash_request_set_crypt(&preq->req, req->src, NULL, rctx->assoclen);
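
The chacha20poly1305.c references above and below all follow one pattern: a per-request context fetched with aead_request_ctx() that embeds subrequests for the inner transforms, here an ahash request used to Poly1305-hash part of the AEAD request. A minimal sketch of that pattern, assuming illustrative demo_* names (the real struct is chachapoly_req_ctx):

	#include <crypto/internal/aead.h>
	#include <crypto/internal/hash.h>

	struct demo_aead_rctx {
		unsigned int assoclen;
		u32 flags;				/* saved aead_request_flags(req) */
		struct ahash_request poly_req;		/* must be last: variable reqsize */
	};

	/* Hash only the associated data at the front of req->src. */
	static int demo_hash_assoc(struct aead_request *req, struct crypto_ahash *poly)
	{
		struct demo_aead_rctx *rctx = aead_request_ctx(req);
		struct ahash_request *preq = &rctx->poly_req;

		ahash_request_set_tfm(preq, poly);
		ahash_request_set_callback(preq, rctx->flags,
					   req->base.complete, req->base.data);
		ahash_request_set_crypt(preq, req->src, NULL, rctx->assoclen);
		return crypto_ahash_update(preq);
	}
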
rctx 316 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 317 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 320 crypto/chacha20poly1305.c sg_init_one(preq->src, rctx->key, sizeof(rctx->key));
rctx 322 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 325 crypto/chacha20poly1305.c ahash_request_set_crypt(&preq->req, preq->src, NULL, sizeof(rctx->key));
rctx 342 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 343 crypto/chacha20poly1305.c struct poly_req *preq = &rctx->u.poly;
rctx 346 crypto/chacha20poly1305.c ahash_request_set_callback(&preq->req, rctx->flags,
rctx 366 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 367 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
rctx 370 crypto/chacha20poly1305.c rctx->assoclen = req->assoclen;
rctx 373 crypto/chacha20poly1305.c if (rctx->assoclen < 8)
rctx 375 crypto/chacha20poly1305.c rctx->assoclen -= 8;
rctx 378 crypto/chacha20poly1305.c memset(rctx->key, 0, sizeof(rctx->key));
rctx 379 crypto/chacha20poly1305.c sg_init_one(creq->src, rctx->key, sizeof(rctx->key));
rctx 383 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
rctx 404 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 405 crypto/chacha20poly1305.c struct chacha_req *creq = &rctx->u.chacha;
rctx 414 crypto/chacha20poly1305.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
rctx 417 crypto/chacha20poly1305.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
rctx 419 crypto/chacha20poly1305.c skcipher_request_set_callback(&creq->req, rctx->flags,
rctx 434 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 436 crypto/chacha20poly1305.c rctx->cryptlen = req->cryptlen;
rctx 437 crypto/chacha20poly1305.c rctx->flags = aead_request_flags(req);
rctx 456 crypto/chacha20poly1305.c struct chachapoly_req_ctx *rctx = aead_request_ctx(req);
rctx 458 crypto/chacha20poly1305.c rctx->cryptlen = req->cryptlen - POLY1305_DIGEST_SIZE;
rctx 459 crypto/chacha20poly1305.c rctx->flags = aead_request_flags(req);
rctx 272 crypto/cryptd.c struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
rctx 276 crypto/cryptd.c rctx->complete(&req->base, err);
rctx 287 crypto/cryptd.c struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
rctx 305 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 315 crypto/cryptd.c struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
rctx 333 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 342 crypto/cryptd.c struct cryptd_skcipher_request_ctx *rctx = skcipher_request_ctx(req);
rctx 347 crypto/cryptd.c rctx->complete = req->base.complete;
rctx 508 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 513 crypto/cryptd.c rctx->complete = req->base.complete;
rctx 523 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 527 crypto/cryptd.c rctx->complete(&req->base, err);
rctx 539 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 540 crypto/cryptd.c struct shash_desc *desc = &rctx->desc;
rctx 549 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 563 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx;
rctx 565 crypto/cryptd.c rctx = ahash_request_ctx(req);
rctx 570 crypto/cryptd.c err = shash_ahash_update(req, &rctx->desc);
rctx 572 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 586 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 591 crypto/cryptd.c err = crypto_shash_final(&rctx->desc, req->result);
rctx 593 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 607 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 612 crypto/cryptd.c err = shash_ahash_finup(req, &rctx->desc);
rctx 614 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 630 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 631 crypto/cryptd.c struct shash_desc *desc = &rctx->desc;
rctx 640 crypto/cryptd.c req->base.complete = rctx->complete;
rctx 653 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 655 crypto/cryptd.c return crypto_shash_export(&rctx->desc, out);
rctx 757 crypto/cryptd.c struct cryptd_aead_request_ctx *rctx;
rctx 763 crypto/cryptd.c rctx = aead_request_ctx(req);
rctx 764 crypto/cryptd.c compl = rctx->complete;
rctx 808 crypto/cryptd.c struct cryptd_aead_request_ctx *rctx = aead_request_ctx(req);
rctx 812 crypto/cryptd.c rctx->complete = req->base.complete;
rctx 1048 crypto/cryptd.c struct cryptd_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 1049 crypto/cryptd.c return &rctx->desc;
rctx 201 crypto/ctr.c struct crypto_rfc3686_req_ctx *rctx =
rctx 203 crypto/ctr.c struct skcipher_request *subreq = &rctx->subreq;
rctx 204 crypto/ctr.c u8 *iv = rctx->iv;
rctx 67 crypto/cts.c struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
rctx 72 crypto/cts.c return PTR_ALIGN((u8 *)(rctx + 1) + crypto_skcipher_reqsize(child),
rctx 104 crypto/cts.c struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
rctx 106 crypto/cts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 113 crypto/cts.c offset = rctx->offset;
rctx 116 crypto/cts.c sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
rctx 150 crypto/cts.c struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
rctx 152 crypto/cts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 172 crypto/cts.c rctx->offset = offset;
rctx 185 crypto/cts.c struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
rctx 187 crypto/cts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 195 crypto/cts.c offset = rctx->offset;
rctx 198 crypto/cts.c sg = scatterwalk_ffwd(rctx->sg, req->dst, offset - bsize);
rctx 244 crypto/cts.c struct crypto_cts_reqctx *rctx = skcipher_request_ctx(req);
rctx 246 crypto/cts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 272 crypto/cts.c rctx->offset = offset;
rctx 189 crypto/essiv.c struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
rctx 191 crypto/essiv.c if (rctx->assoc)
rctx 192 crypto/essiv.c kfree(rctx->assoc);
rctx 200 crypto/essiv.c struct essiv_aead_request_ctx *rctx = aead_request_ctx(req);
rctx 201 crypto/essiv.c struct aead_request *subreq = &rctx->aead_req;
rctx 212 crypto/essiv.c rctx->assoc = NULL;
rctx 232 crypto/essiv.c sg_init_table(rctx->sg, 4);
rctx 239 crypto/essiv.c rctx->assoc = kmalloc(ssize, GFP_ATOMIC);
rctx 240 crypto/essiv.c if (!rctx->assoc)
rctx 243 crypto/essiv.c scatterwalk_map_and_copy(rctx->assoc, req->src, 0,
rctx 245 crypto/essiv.c sg_set_buf(rctx->sg, rctx->assoc, ssize);
rctx 247 crypto/essiv.c sg_set_page(rctx->sg, sg_page(req->src), ssize,
rctx 251 crypto/essiv.c sg_set_buf(rctx->sg + 1, iv, ivsize);
rctx 252 crypto/essiv.c sg = scatterwalk_ffwd(rctx->sg + 2, req->src, req->assoclen);
rctx 253 crypto/essiv.c if (sg != rctx->sg + 2)
rctx 254 crypto/essiv.c sg_chain(rctx->sg, 3, sg);
rctx 256 crypto/essiv.c src = rctx->sg;
rctx 268 crypto/essiv.c if (rctx->assoc && err != -EINPROGRESS)
rctx 269 crypto/essiv.c kfree(rctx->assoc);
rctx 763 crypto/gcm.c struct crypto_rfc4106_req_ctx *rctx = aead_request_ctx(req);
rctx 766 crypto/gcm.c struct aead_request *subreq = &rctx->subreq;
rctx 777 crypto/gcm.c sg_init_table(rctx->src, 3);
rctx 778 crypto/gcm.c sg_set_buf(rctx->src, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
rctx 779 crypto/gcm.c sg = scatterwalk_ffwd(rctx->src + 1, req->src, req->assoclen);
rctx 780 crypto/gcm.c if (sg != rctx->src + 1)
rctx 781 crypto/gcm.c sg_chain(rctx->src, 2, sg);
rctx 784 crypto/gcm.c sg_init_table(rctx->dst, 3);
rctx 785 crypto/gcm.c sg_set_buf(rctx->dst, iv + GCM_AES_IV_SIZE, req->assoclen - 8);
rctx 786 crypto/gcm.c sg = scatterwalk_ffwd(rctx->dst + 1, req->dst, req->assoclen);
rctx 787 crypto/gcm.c if (sg != rctx->dst + 1)
rctx 788 crypto/gcm.c sg_chain(rctx->dst, 2, sg);
rctx 794 crypto/gcm.c aead_request_set_crypt(subreq, rctx->src,
rctx 795 crypto/gcm.c req->src == req->dst ? rctx->src : rctx->dst,
rctx 992 crypto/gcm.c struct crypto_rfc4543_req_ctx *rctx = aead_request_ctx(req);
rctx 993 crypto/gcm.c struct aead_request *subreq = &rctx->subreq;
rctx 995 crypto/gcm.c u8 *iv = PTR_ALIGN((u8 *)(rctx + 1) + crypto_aead_reqsize(ctx->child),
rctx 147 crypto/lrw.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 148 crypto/lrw.c be128 t = rctx->t;
rctx 155 crypto/lrw.c req = &rctx->subreq;
rctx 214 crypto/lrw.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 216 crypto/lrw.c rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 226 crypto/lrw.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 227 crypto/lrw.c struct skcipher_request *subreq = &rctx->subreq;
rctx 236 crypto/lrw.c memcpy(&rctx->t, req->iv, sizeof(rctx->t));
rctx 239 crypto/lrw.c gf128mul_64k_bbe(&rctx->t, ctx->table);
rctx 244 crypto/lrw.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 245 crypto/lrw.c struct skcipher_request *subreq = &rctx->subreq;
rctx 255 crypto/lrw.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 256 crypto/lrw.c struct skcipher_request *subreq = &rctx->subreq;
rctx 278 crypto/lrw.c sizeof(struct rctx));
rctx 215 crypto/rmd128.c struct rmd128_ctx *rctx = shash_desc_ctx(desc);
rctx 217 crypto/rmd128.c rctx->byte_count = 0;
rctx 219 crypto/rmd128.c rctx->state[0] = RMD_H0;
rctx 220 crypto/rmd128.c rctx->state[1] = RMD_H1;
rctx 221 crypto/rmd128.c rctx->state[2] = RMD_H2;
rctx 222 crypto/rmd128.c rctx->state[3] = RMD_H3;
rctx 224 crypto/rmd128.c memset(rctx->buffer, 0, sizeof(rctx->buffer));
rctx 232 crypto/rmd128.c struct rmd128_ctx *rctx = shash_desc_ctx(desc);
rctx 233 crypto/rmd128.c const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx 235 crypto/rmd128.c rctx->byte_count += len;
rctx 239 crypto/rmd128.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 244 crypto/rmd128.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 247 crypto/rmd128.c rmd128_transform(rctx->state, rctx->buffer);
rctx 251 crypto/rmd128.c while (len >= sizeof(rctx->buffer)) {
rctx 252 crypto/rmd128.c memcpy(rctx->buffer, data, sizeof(rctx->buffer));
rctx 253 crypto/rmd128.c rmd128_transform(rctx->state, rctx->buffer);
rctx 254 crypto/rmd128.c data += sizeof(rctx->buffer);
rctx 255 crypto/rmd128.c len -= sizeof(rctx->buffer);
rctx 258 crypto/rmd128.c memcpy(rctx->buffer, data, len);
rctx 267 crypto/rmd128.c struct rmd128_ctx *rctx = shash_desc_ctx(desc);
rctx 273 crypto/rmd128.c bits = cpu_to_le64(rctx->byte_count << 3);
rctx 276 crypto/rmd128.c index = rctx->byte_count & 0x3f;
rctx 285 crypto/rmd128.c dst[i] = cpu_to_le32p(&rctx->state[i]);
rctx 288 crypto/rmd128.c memset(rctx, 0, sizeof(*rctx));
rctx 258 crypto/rmd160.c struct rmd160_ctx *rctx = shash_desc_ctx(desc);
rctx 260 crypto/rmd160.c rctx->byte_count = 0;
rctx 262 crypto/rmd160.c rctx->state[0] = RMD_H0;
rctx 263 crypto/rmd160.c rctx->state[1] = RMD_H1;
rctx 264 crypto/rmd160.c rctx->state[2] = RMD_H2;
rctx 265 crypto/rmd160.c rctx->state[3] = RMD_H3;
rctx 266 crypto/rmd160.c rctx->state[4] = RMD_H4;
rctx 268 crypto/rmd160.c memset(rctx->buffer, 0, sizeof(rctx->buffer));
rctx 276 crypto/rmd160.c struct rmd160_ctx *rctx = shash_desc_ctx(desc);
rctx 277 crypto/rmd160.c const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx 279 crypto/rmd160.c rctx->byte_count += len;
rctx 283 crypto/rmd160.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 288 crypto/rmd160.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 291 crypto/rmd160.c rmd160_transform(rctx->state, rctx->buffer);
rctx 295 crypto/rmd160.c while (len >= sizeof(rctx->buffer)) {
rctx 296 crypto/rmd160.c memcpy(rctx->buffer, data, sizeof(rctx->buffer));
rctx 297 crypto/rmd160.c rmd160_transform(rctx->state, rctx->buffer);
rctx 298 crypto/rmd160.c data += sizeof(rctx->buffer);
rctx 299 crypto/rmd160.c len -= sizeof(rctx->buffer);
rctx 302 crypto/rmd160.c memcpy(rctx->buffer, data, len);
rctx 311 crypto/rmd160.c struct rmd160_ctx *rctx = shash_desc_ctx(desc);
rctx 317 crypto/rmd160.c bits = cpu_to_le64(rctx->byte_count << 3);
rctx 320 crypto/rmd160.c index = rctx->byte_count & 0x3f;
rctx 329 crypto/rmd160.c dst[i] = cpu_to_le32p(&rctx->state[i]);
rctx 332 crypto/rmd160.c memset(rctx, 0, sizeof(*rctx));
rctx 230 crypto/rmd256.c struct rmd256_ctx *rctx = shash_desc_ctx(desc);
rctx 232 crypto/rmd256.c rctx->byte_count = 0;
rctx 234 crypto/rmd256.c rctx->state[0] = RMD_H0;
rctx 235 crypto/rmd256.c rctx->state[1] = RMD_H1;
rctx 236 crypto/rmd256.c rctx->state[2] = RMD_H2;
rctx 237 crypto/rmd256.c rctx->state[3] = RMD_H3;
rctx 238 crypto/rmd256.c rctx->state[4] = RMD_H5;
rctx 239 crypto/rmd256.c rctx->state[5] = RMD_H6;
rctx 240 crypto/rmd256.c rctx->state[6] = RMD_H7;
rctx 241 crypto/rmd256.c rctx->state[7] = RMD_H8;
rctx 243 crypto/rmd256.c memset(rctx->buffer, 0, sizeof(rctx->buffer));
rctx 251 crypto/rmd256.c struct rmd256_ctx *rctx = shash_desc_ctx(desc);
rctx 252 crypto/rmd256.c const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx 254 crypto/rmd256.c rctx->byte_count += len;
rctx 258 crypto/rmd256.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 263 crypto/rmd256.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 266 crypto/rmd256.c rmd256_transform(rctx->state, rctx->buffer);
rctx 270 crypto/rmd256.c while (len >= sizeof(rctx->buffer)) {
rctx 271 crypto/rmd256.c memcpy(rctx->buffer, data, sizeof(rctx->buffer));
rctx 272 crypto/rmd256.c rmd256_transform(rctx->state, rctx->buffer);
rctx 273 crypto/rmd256.c data += sizeof(rctx->buffer);
rctx 274 crypto/rmd256.c len -= sizeof(rctx->buffer);
rctx 277 crypto/rmd256.c memcpy(rctx->buffer, data, len);
rctx 286 crypto/rmd256.c struct rmd256_ctx *rctx = shash_desc_ctx(desc);
rctx 292 crypto/rmd256.c bits = cpu_to_le64(rctx->byte_count << 3);
rctx 295 crypto/rmd256.c index = rctx->byte_count & 0x3f;
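
All four rmd*_update() implementations indexed here share the same partial-block buffering logic. A self-contained sketch of that logic, with demo_transform() and the demo struct standing in for the per-variant compression functions and contexts:

	#include <linux/string.h>
	#include <linux/types.h>

	#define RMD_BLOCK_SIZE 64

	struct demo_rmd_ctx {
		u64 byte_count;
		u32 state[4];
		u8 buffer[RMD_BLOCK_SIZE];
	};

	void demo_transform(u32 *state, const u8 *block);	/* assumed, per variant */

	static void demo_rmd_update(struct demo_rmd_ctx *rctx,
				    const u8 *data, unsigned int len)
	{
		/* Free space left in the partial-block buffer. */
		const u32 avail = sizeof(rctx->buffer) -
				  (rctx->byte_count & (RMD_BLOCK_SIZE - 1));

		rctx->byte_count += len;

		if (avail > len) {	/* not enough for a full block yet */
			memcpy(rctx->buffer + (sizeof(rctx->buffer) - avail),
			       data, len);
			return;
		}

		/* Complete the buffered block, then consume whole input blocks. */
		memcpy(rctx->buffer + (sizeof(rctx->buffer) - avail), data, avail);
		demo_transform(rctx->state, rctx->buffer);
		data += avail;
		len -= avail;

		while (len >= sizeof(rctx->buffer)) {
			memcpy(rctx->buffer, data, sizeof(rctx->buffer));
			demo_transform(rctx->state, rctx->buffer);
			data += sizeof(rctx->buffer);
			len -= sizeof(rctx->buffer);
		}

		/* Stash any tail for the next update or the final padding. */
		memcpy(rctx->buffer, data, len);
	}
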
rctx 304 crypto/rmd256.c dst[i] = cpu_to_le32p(&rctx->state[i]);
rctx 307 crypto/rmd256.c memset(rctx, 0, sizeof(*rctx));
rctx 277 crypto/rmd320.c struct rmd320_ctx *rctx = shash_desc_ctx(desc);
rctx 279 crypto/rmd320.c rctx->byte_count = 0;
rctx 281 crypto/rmd320.c rctx->state[0] = RMD_H0;
rctx 282 crypto/rmd320.c rctx->state[1] = RMD_H1;
rctx 283 crypto/rmd320.c rctx->state[2] = RMD_H2;
rctx 284 crypto/rmd320.c rctx->state[3] = RMD_H3;
rctx 285 crypto/rmd320.c rctx->state[4] = RMD_H4;
rctx 286 crypto/rmd320.c rctx->state[5] = RMD_H5;
rctx 287 crypto/rmd320.c rctx->state[6] = RMD_H6;
rctx 288 crypto/rmd320.c rctx->state[7] = RMD_H7;
rctx 289 crypto/rmd320.c rctx->state[8] = RMD_H8;
rctx 290 crypto/rmd320.c rctx->state[9] = RMD_H9;
rctx 292 crypto/rmd320.c memset(rctx->buffer, 0, sizeof(rctx->buffer));
rctx 300 crypto/rmd320.c struct rmd320_ctx *rctx = shash_desc_ctx(desc);
rctx 301 crypto/rmd320.c const u32 avail = sizeof(rctx->buffer) - (rctx->byte_count & 0x3f);
rctx 303 crypto/rmd320.c rctx->byte_count += len;
rctx 307 crypto/rmd320.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 312 crypto/rmd320.c memcpy((char *)rctx->buffer + (sizeof(rctx->buffer) - avail),
rctx 315 crypto/rmd320.c rmd320_transform(rctx->state, rctx->buffer);
rctx 319 crypto/rmd320.c while (len >= sizeof(rctx->buffer)) {
rctx 320 crypto/rmd320.c memcpy(rctx->buffer, data, sizeof(rctx->buffer));
rctx 321 crypto/rmd320.c rmd320_transform(rctx->state, rctx->buffer);
rctx 322 crypto/rmd320.c data += sizeof(rctx->buffer);
rctx 323 crypto/rmd320.c len -= sizeof(rctx->buffer);
rctx 326 crypto/rmd320.c memcpy(rctx->buffer, data, len);
rctx 335 crypto/rmd320.c struct rmd320_ctx *rctx = shash_desc_ctx(desc);
rctx 341 crypto/rmd320.c bits = cpu_to_le64(rctx->byte_count << 3);
rctx 344 crypto/rmd320.c index = rctx->byte_count & 0x3f;
rctx 353 crypto/rmd320.c dst[i] = cpu_to_le32p(&rctx->state[i]);
rctx 356 crypto/rmd320.c memset(rctx, 0, sizeof(*rctx));
rctx 89 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 94 crypto/xts.c le128 t = rctx->t;
rctx 98 crypto/xts.c req = &rctx->subreq;
rctx 117 crypto/xts.c rctx->t = t;
rctx 122 crypto/xts.c gf128mul_x_ble(&rctx->t, &t);
rctx 153 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 155 crypto/xts.c scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
rctx 156 crypto/xts.c le128_xor(&b, &rctx->t, &b);
rctx 157 crypto/xts.c scatterwalk_map_and_copy(&b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
rctx 168 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 169 crypto/xts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 174 crypto/xts.c rctx->tail = scatterwalk_ffwd(rctx->sg, req->dst,
rctx 177 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
rctx 181 crypto/xts.c le128_xor(b, &rctx->t, b);
rctx 183 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE + tail, 1);
rctx 187 crypto/xts.c skcipher_request_set_crypt(subreq, rctx->tail, rctx->tail,
rctx 194 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 0);
rctx 195 crypto/xts.c le128_xor(b, &rctx->t, b);
rctx 196 crypto/xts.c scatterwalk_map_and_copy(b, rctx->tail, 0, XTS_BLOCK_SIZE, 1);
rctx 206 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 208 crypto/xts.c rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 226 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 228 crypto/xts.c rctx->subreq.base.flags &= ~CRYPTO_TFM_REQ_MAY_SLEEP;
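
The lrw.c and xts.c references above and below show the standard skcipher-template shape: the request context holds a tweak plus a subrequest, and the bulk work is forwarded to a child cipher. A hedged sketch of just the forwarding step, with demo_* names standing in for the templates' real structs:

	#include <crypto/b128ops.h>
	#include <crypto/internal/skcipher.h>

	struct demo_tfm_ctx {
		struct crypto_skcipher *child;
	};

	struct demo_rctx {
		le128 t;			/* tweak, as in xts.c's struct rctx */
		struct skcipher_request subreq;	/* must be last: variable reqsize */
	};

	static int demo_encrypt(struct skcipher_request *req)
	{
		struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
		struct demo_tfm_ctx *ctx = crypto_skcipher_ctx(tfm);
		struct demo_rctx *rctx = skcipher_request_ctx(req);
		struct skcipher_request *subreq = &rctx->subreq;

		skcipher_request_set_tfm(subreq, ctx->child);
		skcipher_request_set_callback(subreq, req->base.flags,
					      req->base.complete, req->base.data);
		skcipher_request_set_crypt(subreq, req->src, req->dst,
					   req->cryptlen, req->iv);
		return crypto_skcipher_encrypt(subreq);
	}

For this to work the template must reserve room for struct demo_rctx at init time with crypto_skcipher_set_reqsize(), which is what the "sizeof(struct rctx)" references at lrw.c:278 and xts.c:318 are doing.
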
rctx 244 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 245 crypto/xts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 256 crypto/xts.c crypto_cipher_encrypt_one(ctx->tweak, (u8 *)&rctx->t, req->iv);
rctx 263 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 264 crypto/xts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 280 crypto/xts.c struct rctx *rctx = skcipher_request_ctx(req);
rctx 281 crypto/xts.c struct skcipher_request *subreq = &rctx->subreq;
rctx 318 crypto/xts.c sizeof(struct rctx));
rctx 463 drivers/crypto/amcc/crypto4xx_alg.c struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
rctx 489 drivers/crypto/amcc/crypto4xx_alg.c sa, ctx->sa_len, req->assoclen, rctx->dst);
rctx 612 drivers/crypto/amcc/crypto4xx_alg.c struct crypto4xx_aead_reqctx *rctx = aead_request_ctx(req);
rctx 628 drivers/crypto/amcc/crypto4xx_alg.c ctx->sa_len, req->assoclen, rctx->dst);
rctx 477 drivers/crypto/atmel-aes.c const struct atmel_aes_reqctx *rctx)
rctx 480 drivers/crypto/atmel-aes.c dd->flags = (dd->flags & AES_FLAGS_PERSISTENT) | rctx->mode;
rctx 495 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 502 drivers/crypto/atmel-aes.c if (rctx->mode & AES_FLAGS_ENCRYPT) {
rctx 507 drivers/crypto/atmel-aes.c memcpy(req->info, rctx->lastc, ivsize);
rctx 984 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 989 drivers/crypto/atmel-aes.c atmel_aes_set_mode(dd, rctx);
rctx 1070 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 1073 drivers/crypto/atmel-aes.c atmel_aes_set_mode(dd, rctx);
rctx 1089 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx;
rctx 1119 drivers/crypto/atmel-aes.c rctx = ablkcipher_request_ctx(req);
rctx 1120 drivers/crypto/atmel-aes.c rctx->mode = mode;
rctx 1126 drivers/crypto/atmel-aes.c scatterwalk_map_and_copy(rctx->lastc, req->src,
rctx 1532 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx = aead_request_ctx(req);
rctx 1539 drivers/crypto/atmel-aes.c atmel_aes_set_mode(dd, rctx);
rctx 1747 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx;
rctx 1758 drivers/crypto/atmel-aes.c rctx = aead_request_ctx(req);
rctx 1759 drivers/crypto/atmel-aes.c rctx->mode = AES_FLAGS_GCM | mode;
rctx 1858 drivers/crypto/atmel-aes.c struct atmel_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 1862 drivers/crypto/atmel-aes.c atmel_aes_set_mode(dd, rctx);
rctx 1988 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 1991 drivers/crypto/atmel-aes.c atmel_sha_authenc_abort(&rctx->auth_req);
rctx 1998 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2003 drivers/crypto/atmel-aes.c atmel_aes_set_mode(dd, &rctx->base);
rctx 2009 drivers/crypto/atmel-aes.c return atmel_sha_authenc_schedule(&rctx->auth_req, ctx->auth,
rctx 2017 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2028 drivers/crypto/atmel-aes.c return atmel_sha_authenc_init(&rctx->auth_req,
rctx 2030 drivers/crypto/atmel-aes.c rctx->textlen,
rctx 2038 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2050 drivers/crypto/atmel-aes.c src = scatterwalk_ffwd(rctx->src, req->src, req->assoclen);
rctx 2054 drivers/crypto/atmel-aes.c dst = scatterwalk_ffwd(rctx->dst, req->dst, req->assoclen);
rctx 2073 drivers/crypto/atmel-aes.c return atmel_aes_dma_start(dd, src, dst, rctx->textlen,
rctx 2080 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2084 drivers/crypto/atmel-aes.c return atmel_sha_authenc_final(&rctx->auth_req,
rctx 2085 drivers/crypto/atmel-aes.c rctx->digest, sizeof(rctx->digest),
rctx 2093 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2096 drivers/crypto/atmel-aes.c u32 idigest[SHA512_DIGEST_SIZE / sizeof(u32)], *odigest = rctx->digest;
rctx 2104 drivers/crypto/atmel-aes.c offs = req->assoclen + rctx->textlen;
rctx 2208 drivers/crypto/atmel-aes.c struct atmel_aes_authenc_reqctx *rctx = aead_request_ctx(req);
rctx 2218 drivers/crypto/atmel-aes.c rctx->textlen = req->cryptlen - (enc ? 0 : authsize);
rctx 2225 drivers/crypto/atmel-aes.c if (!rctx->textlen && !req->assoclen)
rctx 2228 drivers/crypto/atmel-aes.c rctx->base.mode = mode;
rctx 590 drivers/crypto/atmel-tdes.c struct atmel_tdes_reqctx *rctx;
rctx 623 drivers/crypto/atmel-tdes.c rctx = ablkcipher_request_ctx(req);
rctx 625 drivers/crypto/atmel-tdes.c rctx->mode &= TDES_FLAGS_MODE_MASK;
rctx 626 drivers/crypto/atmel-tdes.c dd->flags = (dd->flags & ~TDES_FLAGS_MODE_MASK) | rctx->mode;
rctx 672 drivers/crypto/atmel-tdes.c struct atmel_tdes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 700 drivers/crypto/atmel-tdes.c rctx->mode = mode;
rctx 134 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 140 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 144 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 151 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
rctx 156 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak,
rctx 160 drivers/crypto/bcm/cipher.c datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
rctx 161 drivers/crypto/bcm/cipher.c rctx->dst_nents, chunksize);
rctx 170 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.c.supdt_tweak, SPU_SUPDT_LEN);
rctx 173 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
rctx 175 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);
rctx 176 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
rctx 202 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 207 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 212 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 219 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
rctx 225 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.iv_ctr, SPU_XTS_TWEAK_SIZE);
rctx 228 drivers/crypto/bcm/cipher.c datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
rctx 229 drivers/crypto/bcm/cipher.c rctx->src_nents, chunksize);
rctx 237 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
rctx 241 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.tx_stat, 0, stat_len);
rctx 242 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
rctx 303 drivers/crypto/bcm/cipher.c static int handle_ablkcipher_req(struct iproc_reqctx_s *rctx)
rctx 306 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 309 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 335 drivers/crypto/bcm/cipher.c cipher_parms.iv_len = rctx->iv_ctr_len;
rctx 337 drivers/crypto/bcm/cipher.c mssg = &rctx->mb_mssg;
rctx 338 drivers/crypto/bcm/cipher.c chunk_start = rctx->src_sent;
rctx 339 drivers/crypto/bcm/cipher.c remaining = rctx->total_todo - chunk_start;
rctx 348 drivers/crypto/bcm/cipher.c rctx->src_sent += chunksize;
rctx 349 drivers/crypto/bcm/cipher.c rctx->total_sent = rctx->src_sent;
rctx 352 drivers/crypto/bcm/cipher.c rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);
rctx 353 drivers/crypto/bcm/cipher.c rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);
rctx 356 drivers/crypto/bcm/cipher.c rctx->is_encrypt && chunk_start)
rctx 361 drivers/crypto/bcm/cipher.c sg_copy_part_to_buf(req->dst, rctx->msg_buf.iv_ctr,
rctx 362 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len,
rctx 363 drivers/crypto/bcm/cipher.c chunk_start - rctx->iv_ctr_len);
rctx 365 drivers/crypto/bcm/cipher.c if (rctx->iv_ctr_len) {
rctx 367 drivers/crypto/bcm/cipher.c __builtin_memcpy(local_iv_ctr, rctx->msg_buf.iv_ctr,
rctx 368 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len);
rctx 372 drivers/crypto/bcm/cipher.c !rctx->is_encrypt) {
rctx 377 drivers/crypto/bcm/cipher.c sg_copy_part_to_buf(req->src, rctx->msg_buf.iv_ctr,
rctx 378 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len,
rctx 379 drivers/crypto/bcm/cipher.c rctx->src_sent - rctx->iv_ctr_len);
rctx 390 drivers/crypto/bcm/cipher.c add_to_ctr(rctx->msg_buf.iv_ctr, chunksize >> 4);
rctx 401 drivers/crypto/bcm/cipher.c cipher_parms.key_buf = rctx->msg_buf.c.supdt_tweak;
rctx 404 drivers/crypto/bcm/cipher.c } else if (!rctx->is_encrypt) {
rctx 422 drivers/crypto/bcm/cipher.c rctx->src_sent, chunk_start, remaining, chunksize);
rctx 425 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.bcm_spu_req_hdr, ctx->bcm_spu_req_hdr,
rctx 426 drivers/crypto/bcm/cipher.c sizeof(rctx->msg_buf.bcm_spu_req_hdr));
rctx 433 drivers/crypto/bcm/cipher.c spu->spu_cipher_req_finish(rctx->msg_buf.bcm_spu_req_hdr + BCM_HDR_LEN,
rctx 434 drivers/crypto/bcm/cipher.c ctx->spu_req_hdr_len, !(rctx->is_encrypt),
rctx 445 drivers/crypto/bcm/cipher.c spu->spu_request_pad(rctx->msg_buf.spu_req_pad, 0,
rctx 447 drivers/crypto/bcm/cipher.c rctx->total_sent, stat_pad_len);
rctx 450 drivers/crypto/bcm/cipher.c spu->spu_dump_msg_hdr(rctx->msg_buf.bcm_spu_req_hdr + BCM_HDR_LEN,
rctx 453 drivers/crypto/bcm/cipher.c dump_sg(rctx->src_sg, rctx->src_skip, chunksize);
rctx 454 drivers/crypto/bcm/cipher.c packet_dump("  pad: ", rctx->msg_buf.spu_req_pad, pad_len);
rctx 462 drivers/crypto/bcm/cipher.c mssg->ctx = rctx; /* Will be returned in response */
rctx 465 drivers/crypto/bcm/cipher.c rx_frag_num += rctx->dst_nents;
rctx 471 drivers/crypto/bcm/cipher.c err = spu_ablkcipher_rx_sg_create(mssg, rctx, rx_frag_num, chunksize,
rctx 477 drivers/crypto/bcm/cipher.c tx_frag_num += rctx->src_nents;
rctx 485 drivers/crypto/bcm/cipher.c err = spu_ablkcipher_tx_sg_create(mssg, rctx, tx_frag_num, chunksize,
rctx 490 drivers/crypto/bcm/cipher.c err = mailbox_send_message(mssg, req->base.flags, rctx->chan_idx);
rctx 502 drivers/crypto/bcm/cipher.c static void handle_ablkcipher_resp(struct iproc_reqctx_s *rctx)
rctx 506 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 509 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 513 drivers/crypto/bcm/cipher.c payload_len = spu->spu_payload_length(rctx->msg_buf.spu_resp_hdr);
rctx 527 drivers/crypto/bcm/cipher.c __func__, rctx->total_received, payload_len);
rctx 529 drivers/crypto/bcm/cipher.c dump_sg(req->dst, rctx->total_received, payload_len);
rctx 531 drivers/crypto/bcm/cipher.c packet_dump("  supdt ", rctx->msg_buf.c.supdt_tweak,
rctx 534 drivers/crypto/bcm/cipher.c rctx->total_received += payload_len;
rctx 535 drivers/crypto/bcm/cipher.c if (rctx->total_received == rctx->total_todo) {
rctx 563 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 569 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 572 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 579 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
rctx 582 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.digest, digestsize);
rctx 585 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
rctx 587 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);
rctx 588 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
rctx 615 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 627 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 634 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
rctx 638 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->hash_carry, hash_carry_len);
rctx 642 drivers/crypto/bcm/cipher.c datalen = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
rctx 643 drivers/crypto/bcm/cipher.c rctx->src_nents, new_data_len);
rctx 652 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
rctx 656 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.tx_stat, 0, stat_len);
rctx 657 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
rctx 689 drivers/crypto/bcm/cipher.c static int handle_ahash_req(struct iproc_reqctx_s *rctx)
rctx 692 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 697 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 732 drivers/crypto/bcm/cipher.c rctx->total_todo, rctx->total_sent);
rctx 755 drivers/crypto/bcm/cipher.c mssg = &rctx->mb_mssg;
rctx 756 drivers/crypto/bcm/cipher.c chunk_start = rctx->src_sent;
rctx 762 drivers/crypto/bcm/cipher.c nbytes_to_hash = rctx->total_todo - rctx->total_sent;
rctx 773 drivers/crypto/bcm/cipher.c if (!rctx->is_final) {
rctx 774 drivers/crypto/bcm/cipher.c u8 *dest = rctx->hash_carry + rctx->hash_carry_len;
rctx 783 drivers/crypto/bcm/cipher.c new_len = rem - rctx->hash_carry_len;
rctx 785 drivers/crypto/bcm/cipher.c rctx->src_sent);
rctx 786 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = rem;
rctx 788 drivers/crypto/bcm/cipher.c rctx->hash_carry_len);
rctx 790 drivers/crypto/bcm/cipher.c rctx->hash_carry,
rctx 791 drivers/crypto/bcm/cipher.c rctx->hash_carry_len);
rctx 798 drivers/crypto/bcm/cipher.c local_nbuf = rctx->hash_carry_len;
rctx 799 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = 0;
rctx 805 drivers/crypto/bcm/cipher.c rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip,
rctx 812 drivers/crypto/bcm/cipher.c hash_parms.type = spu->spu_hash_type(rctx->total_sent);
rctx 819 drivers/crypto/bcm/cipher.c rctx->total_sent += chunksize;
rctx 821 drivers/crypto/bcm/cipher.c rctx->src_sent += new_data_len;
rctx 823 drivers/crypto/bcm/cipher.c if ((rctx->total_sent == rctx->total_todo) && rctx->is_final)
rctx 835 drivers/crypto/bcm/cipher.c hash_parms.key_buf = rctx->incr_hash;
rctx 842 drivers/crypto/bcm/cipher.c __func__, rctx->is_final, local_nbuf);
rctx 852 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.bcm_spu_req_hdr, BCMHEADER, BCM_HDR_LEN);
rctx 855 drivers/crypto/bcm/cipher.c spu_hdr_len = spu->spu_create_request(rctx->msg_buf.bcm_spu_req_hdr +
rctx 880 drivers/crypto/bcm/cipher.c spu->spu_request_pad(rctx->msg_buf.spu_req_pad, data_pad_len,
rctx 882 drivers/crypto/bcm/cipher.c ctx->auth.mode, rctx->total_sent,
rctx 886 drivers/crypto/bcm/cipher.c spu->spu_dump_msg_hdr(rctx->msg_buf.bcm_spu_req_hdr + BCM_HDR_LEN,
rctx 888 drivers/crypto/bcm/cipher.c packet_dump("  prebuf: ", rctx->hash_carry, local_nbuf);
rctx 890 drivers/crypto/bcm/cipher.c dump_sg(rctx->src_sg, rctx->src_skip, new_data_len);
rctx 891 drivers/crypto/bcm/cipher.c packet_dump("  pad: ", rctx->msg_buf.spu_req_pad, pad_len);
rctx 899 drivers/crypto/bcm/cipher.c mssg->ctx = rctx; /* Will be returned in response */
rctx 902 drivers/crypto/bcm/cipher.c err = spu_ahash_rx_sg_create(mssg, rctx, rx_frag_num, digestsize,
rctx 908 drivers/crypto/bcm/cipher.c tx_frag_num += rctx->src_nents;
rctx 911 drivers/crypto/bcm/cipher.c err = spu_ahash_tx_sg_create(mssg, rctx, tx_frag_num, spu_hdr_len,
rctx 916 drivers/crypto/bcm/cipher.c err = mailbox_send_message(mssg, req->base.flags, rctx->chan_idx);
rctx 980 drivers/crypto/bcm/cipher.c static int ahash_req_done(struct iproc_reqctx_s *rctx)
rctx 983 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 985 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 988 drivers/crypto/bcm/cipher.c memcpy(req->result, rctx->msg_buf.digest, ctx->digestsize);
rctx 1006 drivers/crypto/bcm/cipher.c if (rctx->is_sw_hmac) {
rctx 1013 drivers/crypto/bcm/cipher.c if (rctx->is_sw_hmac || ctx->auth.mode == HASH_MODE_HMAC) {
rctx 1030 drivers/crypto/bcm/cipher.c static void handle_ahash_resp(struct iproc_reqctx_s *rctx)
rctx 1032 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 1034 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 1044 drivers/crypto/bcm/cipher.c memcpy(rctx->incr_hash, rctx->msg_buf.digest, MAX_DIGEST_SIZE);
rctx 1051 drivers/crypto/bcm/cipher.c if (rctx->is_final && (rctx->total_sent == rctx->total_todo))
rctx 1052 drivers/crypto/bcm/cipher.c ahash_req_done(rctx);
rctx 1081 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 1089 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 1104 drivers/crypto/bcm/cipher.c rctx->is_encrypt);
rctx 1118 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 1126 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_resp_hdr, ctx->spu_resp_hdr_len);
rctx 1133 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.a.resp_aad, 0, assoc_buf_len);
rctx 1134 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.a.resp_aad, assoc_buf_len);
rctx 1142 drivers/crypto/bcm/cipher.c datalen = spu_msg_sg_add(&sg, &rctx->dst_sg, &rctx->dst_skip,
rctx 1143 drivers/crypto/bcm/cipher.c rctx->dst_nents, resp_len);
rctx 1153 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.a.gcmpad, 0, data_padlen);
rctx 1154 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.a.gcmpad, data_padlen);
rctx 1158 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.digest, digestsize);
rctx 1162 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.rx_stat_pad, 0, stat_pad_len);
rctx 1163 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.rx_stat_pad, stat_pad_len);
rctx 1166 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.rx_stat, 0, SPU_RX_STATUS_LEN);
rctx 1167 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.rx_stat, spu->spu_rx_status_len());
rctx 1200 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx,
rctx 1213 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 1220 drivers/crypto/bcm/cipher.c rctx->gfp);
rctx 1227 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.bcm_spu_req_hdr,
rctx 1242 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.iv_ctr, aead_iv_len);
rctx 1245 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.a.req_aad_pad, 0, aad_pad_len);
rctx 1246 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.a.req_aad_pad, aad_pad_len);
rctx 1254 drivers/crypto/bcm/cipher.c written = spu_msg_sg_add(&sg, &rctx->src_sg, &rctx->src_skip,
rctx 1255 drivers/crypto/bcm/cipher.c rctx->src_nents, datalen);
rctx 1264 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.spu_req_pad, 0, pad_len);
rctx 1265 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.spu_req_pad, pad_len);
rctx 1269 drivers/crypto/bcm/cipher.c sg_set_buf(sg++, rctx->msg_buf.digest, ctx->digestsize);
rctx 1273 drivers/crypto/bcm/cipher.c memset(rctx->msg_buf.tx_stat, 0, stat_len);
rctx 1274 drivers/crypto/bcm/cipher.c sg_set_buf(sg, rctx->msg_buf.tx_stat, stat_len);
rctx 1296 drivers/crypto/bcm/cipher.c static int handle_aead_req(struct iproc_reqctx_s *rctx)
rctx 1299 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 1302 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 1325 drivers/crypto/bcm/cipher.c chunksize = rctx->total_todo;
rctx 1333 drivers/crypto/bcm/cipher.c req_opts.is_inbound = !(rctx->is_encrypt);
rctx 1343 drivers/crypto/bcm/cipher.c cipher_parms.iv_buf = rctx->msg_buf.iv_ctr;
rctx 1344 drivers/crypto/bcm/cipher.c cipher_parms.iv_len = rctx->iv_ctr_len;
rctx 1366 drivers/crypto/bcm/cipher.c if (rctx->is_encrypt) {
rctx 1381 drivers/crypto/bcm/cipher.c rctx->src_nents = spu_sg_count(rctx->src_sg, rctx->src_skip, chunksize);
rctx 1382 drivers/crypto/bcm/cipher.c rctx->dst_nents = spu_sg_count(rctx->dst_sg, rctx->dst_skip, chunksize);
rctx 1384 drivers/crypto/bcm/cipher.c assoc_nents = spu_sg_count(rctx->assoc, 0,
rctx 1387 drivers/crypto/bcm/cipher.c mssg = &rctx->mb_mssg;
rctx 1389 drivers/crypto/bcm/cipher.c rctx->total_sent = chunksize;
rctx 1390 drivers/crypto/bcm/cipher.c rctx->src_sent = chunksize;
rctx 1394 drivers/crypto/bcm/cipher.c rctx->is_encrypt))
rctx 1398 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len);
rctx 1424 drivers/crypto/bcm/cipher.c if (!rctx->is_encrypt)
rctx 1431 drivers/crypto/bcm/cipher.c chunksize, rctx->is_encrypt,
rctx 1441 drivers/crypto/bcm/cipher.c if (!rctx->is_encrypt)
rctx 1454 drivers/crypto/bcm/cipher.c if (spu_req_incl_icv(ctx->cipher.mode, rctx->is_encrypt)) {
rctx 1458 drivers/crypto/bcm/cipher.c sg_copy_part_to_buf(req->src, rctx->msg_buf.digest, digestsize,
rctx 1459 drivers/crypto/bcm/cipher.c req->assoclen + rctx->total_sent -
rctx 1468 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.bcm_spu_req_hdr, BCMHEADER, BCM_HDR_LEN);
rctx 1470 drivers/crypto/bcm/cipher.c spu_hdr_len = spu->spu_create_request(rctx->msg_buf.bcm_spu_req_hdr +
rctx 1487 drivers/crypto/bcm/cipher.c spu->spu_request_pad(rctx->msg_buf.spu_req_pad,
rctx 1490 drivers/crypto/bcm/cipher.c rctx->total_sent, stat_pad_len);
rctx 1493 drivers/crypto/bcm/cipher.c spu->spu_dump_msg_hdr(rctx->msg_buf.bcm_spu_req_hdr + BCM_HDR_LEN,
rctx 1495 drivers/crypto/bcm/cipher.c dump_sg(rctx->assoc, 0, aead_parms.assoc_size);
rctx 1496 drivers/crypto/bcm/cipher.c packet_dump("  aead iv: ", rctx->msg_buf.iv_ctr, aead_parms.iv_len);
rctx 1498 drivers/crypto/bcm/cipher.c dump_sg(rctx->src_sg, rctx->src_skip, chunksize);
rctx 1499 drivers/crypto/bcm/cipher.c packet_dump("  pad: ", rctx->msg_buf.spu_req_pad, pad_len);
rctx 1507 drivers/crypto/bcm/cipher.c mssg->ctx = rctx; /* Will be returned in response */
rctx 1510 drivers/crypto/bcm/cipher.c rx_frag_num += rctx->dst_nents;
rctx 1521 drivers/crypto/bcm/cipher.c (ctx->cipher.mode == CIPHER_MODE_CCM)) && !rctx->is_encrypt) {
rctx 1529 drivers/crypto/bcm/cipher.c rx_frag_num -= rctx->dst_nents;
rctx 1532 drivers/crypto/bcm/cipher.c err = spu_aead_rx_sg_create(mssg, req, rctx, rx_frag_num,
rctx 1540 drivers/crypto/bcm/cipher.c tx_frag_num += rctx->src_nents;
rctx 1548 drivers/crypto/bcm/cipher.c err = spu_aead_tx_sg_create(mssg, rctx, tx_frag_num, spu_hdr_len,
rctx 1549 drivers/crypto/bcm/cipher.c rctx->assoc, aead_parms.assoc_size,
rctx 1555 drivers/crypto/bcm/cipher.c err = mailbox_send_message(mssg, req->base.flags, rctx->chan_idx);
rctx 1566 drivers/crypto/bcm/cipher.c static void handle_aead_resp(struct iproc_reqctx_s *rctx)
rctx 1569 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 1572 drivers/crypto/bcm/cipher.c struct iproc_ctx_s *ctx = rctx->ctx;
rctx 1578 drivers/crypto/bcm/cipher.c payload_len = spu->spu_payload_length(rctx->msg_buf.spu_resp_hdr);
rctx 1585 drivers/crypto/bcm/cipher.c packet_dump("  assoc_data ", rctx->msg_buf.a.resp_aad,
rctx 1594 drivers/crypto/bcm/cipher.c if (rctx->is_encrypt) {
rctx 1595 drivers/crypto/bcm/cipher.c icv_offset = req->assoclen + rctx->total_sent;
rctx 1596 drivers/crypto/bcm/cipher.c packet_dump("  ICV: ", rctx->msg_buf.digest, ctx->digestsize);
rctx 1598 drivers/crypto/bcm/cipher.c sg_copy_part_from_buf(req->dst, rctx->msg_buf.digest,
rctx 1626 drivers/crypto/bcm/cipher.c static void spu_chunk_cleanup(struct iproc_reqctx_s *rctx)
rctx 1629 drivers/crypto/bcm/cipher.c struct brcm_message *mssg = &rctx->mb_mssg;
rctx 1644 drivers/crypto/bcm/cipher.c static void finish_req(struct iproc_reqctx_s *rctx, int err)
rctx 1646 drivers/crypto/bcm/cipher.c struct crypto_async_request *areq = rctx->parent;
rctx 1651 drivers/crypto/bcm/cipher.c spu_chunk_cleanup(rctx);
rctx 1666 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx;
rctx 1669 drivers/crypto/bcm/cipher.c rctx = mssg->ctx;
rctx 1670 drivers/crypto/bcm/cipher.c if (unlikely(!rctx)) {
rctx 1678 drivers/crypto/bcm/cipher.c err = spu->spu_status_process(rctx->msg_buf.rx_stat);
rctx 1687 drivers/crypto/bcm/cipher.c switch (rctx->ctx->alg->type) {
rctx 1689 drivers/crypto/bcm/cipher.c handle_ablkcipher_resp(rctx);
rctx 1692 drivers/crypto/bcm/cipher.c handle_ahash_resp(rctx);
rctx 1695 drivers/crypto/bcm/cipher.c handle_aead_resp(rctx);
rctx 1706 drivers/crypto/bcm/cipher.c if (rctx->total_sent < rctx->total_todo) {
rctx 1708 drivers/crypto/bcm/cipher.c spu_chunk_cleanup(rctx);
rctx 1710 drivers/crypto/bcm/cipher.c switch (rctx->ctx->alg->type) {
rctx 1712 drivers/crypto/bcm/cipher.c err = handle_ablkcipher_req(rctx);
rctx 1715 drivers/crypto/bcm/cipher.c err = handle_ahash_req(rctx);
rctx 1724 drivers/crypto/bcm/cipher.c err = handle_aead_req(rctx);
rctx 1736 drivers/crypto/bcm/cipher.c finish_req(rctx, err);
rctx 1752 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ablkcipher_request_ctx(req);
rctx 1759 drivers/crypto/bcm/cipher.c rctx->gfp = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
rctx 1761 drivers/crypto/bcm/cipher.c rctx->parent = &req->base;
rctx 1762 drivers/crypto/bcm/cipher.c rctx->is_encrypt = encrypt;
rctx 1763 drivers/crypto/bcm/cipher.c rctx->bd_suppress = false;
rctx 1764 drivers/crypto/bcm/cipher.c rctx->total_todo = req->nbytes;
rctx 1765 drivers/crypto/bcm/cipher.c rctx->src_sent = 0;
rctx 1766 drivers/crypto/bcm/cipher.c rctx->total_sent = 0;
rctx 1767 drivers/crypto/bcm/cipher.c rctx->total_received = 0;
rctx 1768 drivers/crypto/bcm/cipher.c rctx->ctx = ctx;
rctx 1771 drivers/crypto/bcm/cipher.c rctx->src_sg = req->src;
rctx 1772 drivers/crypto/bcm/cipher.c rctx->src_nents = 0;
rctx 1773 drivers/crypto/bcm/cipher.c rctx->src_skip = 0;
rctx 1774 drivers/crypto/bcm/cipher.c rctx->dst_sg = req->dst;
rctx 1775 drivers/crypto/bcm/cipher.c rctx->dst_nents = 0;
rctx 1776 drivers/crypto/bcm/cipher.c rctx->dst_skip = 0;
rctx 1784 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len =
rctx 1786 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.iv_ctr, req->info, rctx->iv_ctr_len);
rctx 1788 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len = 0;
rctx 1792 drivers/crypto/bcm/cipher.c rctx->chan_idx = select_channel();
rctx 1793 drivers/crypto/bcm/cipher.c err = handle_ablkcipher_req(rctx);
rctx 1796 drivers/crypto/bcm/cipher.c spu_chunk_cleanup(rctx);
rctx 1968 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req);
rctx 1976 drivers/crypto/bcm/cipher.c rctx->gfp = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
rctx 1978 drivers/crypto/bcm/cipher.c rctx->parent = &req->base;
rctx 1979 drivers/crypto/bcm/cipher.c rctx->ctx = ctx;
rctx 1980 drivers/crypto/bcm/cipher.c rctx->bd_suppress = true;
rctx 1981 drivers/crypto/bcm/cipher.c memset(&rctx->mb_mssg, 0, sizeof(struct brcm_message));
rctx 1984 drivers/crypto/bcm/cipher.c rctx->src_sg = req->src;
rctx 1985 drivers/crypto/bcm/cipher.c rctx->src_skip = 0;
rctx 1986 drivers/crypto/bcm/cipher.c rctx->src_nents = 0;
rctx 1987 drivers/crypto/bcm/cipher.c rctx->dst_sg = NULL;
rctx 1988 drivers/crypto/bcm/cipher.c rctx->dst_skip = 0;
rctx 1989 drivers/crypto/bcm/cipher.c rctx->dst_nents = 0;
rctx 1992 drivers/crypto/bcm/cipher.c if ((rctx->is_final == 1) && (rctx->total_todo == 0) &&
rctx 1996 drivers/crypto/bcm/cipher.c rctx->is_final ? "" : "non-", alg_name);
"" : "non-", alg_name); rctx 2005 drivers/crypto/bcm/cipher.c rctx->chan_idx = select_channel(); rctx 2007 drivers/crypto/bcm/cipher.c err = handle_ahash_req(rctx); rctx 2010 drivers/crypto/bcm/cipher.c spu_chunk_cleanup(rctx); rctx 2025 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2032 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = 0; rctx 2033 drivers/crypto/bcm/cipher.c rctx->is_final = 0; rctx 2035 drivers/crypto/bcm/cipher.c rctx->total_todo = 0; rctx 2036 drivers/crypto/bcm/cipher.c rctx->src_sent = 0; rctx 2037 drivers/crypto/bcm/cipher.c rctx->total_sent = 0; rctx 2038 drivers/crypto/bcm/cipher.c rctx->total_received = 0; rctx 2044 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = false; rctx 2141 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2147 drivers/crypto/bcm/cipher.c rctx->total_todo += req->nbytes; rctx 2148 drivers/crypto/bcm/cipher.c rctx->src_sent = 0; rctx 2199 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2203 drivers/crypto/bcm/cipher.c rctx->is_final = 1; rctx 2236 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2240 drivers/crypto/bcm/cipher.c rctx->total_todo += req->nbytes; rctx 2241 drivers/crypto/bcm/cipher.c rctx->src_sent = 0; rctx 2242 drivers/crypto/bcm/cipher.c rctx->is_final = 1; rctx 2351 drivers/crypto/bcm/cipher.c const struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2354 drivers/crypto/bcm/cipher.c spu_exp->total_todo = rctx->total_todo; rctx 2355 drivers/crypto/bcm/cipher.c spu_exp->total_sent = rctx->total_sent; rctx 2356 drivers/crypto/bcm/cipher.c spu_exp->is_sw_hmac = rctx->is_sw_hmac; rctx 2357 drivers/crypto/bcm/cipher.c memcpy(spu_exp->hash_carry, rctx->hash_carry, sizeof(rctx->hash_carry)); rctx 2358 drivers/crypto/bcm/cipher.c spu_exp->hash_carry_len = rctx->hash_carry_len; rctx 2359 drivers/crypto/bcm/cipher.c memcpy(spu_exp->incr_hash, rctx->incr_hash, sizeof(rctx->incr_hash)); rctx 2366 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2369 drivers/crypto/bcm/cipher.c rctx->total_todo = spu_exp->total_todo; rctx 2370 drivers/crypto/bcm/cipher.c rctx->total_sent = spu_exp->total_sent; rctx 2371 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = spu_exp->is_sw_hmac; rctx 2372 drivers/crypto/bcm/cipher.c memcpy(rctx->hash_carry, spu_exp->hash_carry, sizeof(rctx->hash_carry)); rctx 2373 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = spu_exp->hash_carry_len; rctx 2374 drivers/crypto/bcm/cipher.c memcpy(rctx->incr_hash, spu_exp->incr_hash, sizeof(rctx->incr_hash)); rctx 2481 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2494 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = true; rctx 2497 drivers/crypto/bcm/cipher.c memcpy(rctx->hash_carry, ctx->ipad, blocksize); rctx 2498 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = blocksize; rctx 2499 drivers/crypto/bcm/cipher.c rctx->total_todo += blocksize; rctx 2531 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = ahash_request_ctx(req); rctx 2551 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = false; rctx 2554 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = true; rctx 2557 drivers/crypto/bcm/cipher.c memcpy(rctx->hash_carry, ctx->ipad, blocksize); rctx 2558 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = blocksize; rctx 2559 drivers/crypto/bcm/cipher.c rctx->total_todo += blocksize; rctx 2569 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s 
rctx 2582 drivers/crypto/bcm/cipher.c if ((rctx->is_encrypt && (req->cryptlen == 0)) ||
rctx 2583 drivers/crypto/bcm/cipher.c (!rctx->is_encrypt && (req->cryptlen == ctx->digestsize))) {
rctx 2617 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len == GCM_RFC4106_IV_SIZE &&
rctx 2640 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = aead_request_ctx(req);
rctx 2647 drivers/crypto/bcm/cipher.c areq->complete = rctx->old_complete;
rctx 2648 drivers/crypto/bcm/cipher.c areq->data = rctx->old_data;
rctx 2657 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = aead_request_ctx(req);
rctx 2666 drivers/crypto/bcm/cipher.c rctx->old_tfm = tfm;
rctx 2672 drivers/crypto/bcm/cipher.c rctx->old_complete = req->base.complete;
rctx 2673 drivers/crypto/bcm/cipher.c rctx->old_data = req->base.data;
rctx 2685 drivers/crypto/bcm/cipher.c rctx->old_complete, req);
rctx 2686 drivers/crypto/bcm/cipher.c req->base.data = rctx->old_data;
rctx 2700 drivers/crypto/bcm/cipher.c struct iproc_reqctx_s *rctx = aead_request_ctx(req);
rctx 2714 drivers/crypto/bcm/cipher.c rctx->gfp = (req->base.flags & (CRYPTO_TFM_REQ_MAY_BACKLOG |
rctx 2716 drivers/crypto/bcm/cipher.c rctx->parent = &req->base;
rctx 2717 drivers/crypto/bcm/cipher.c rctx->is_encrypt = is_encrypt;
rctx 2718 drivers/crypto/bcm/cipher.c rctx->bd_suppress = false;
rctx 2719 drivers/crypto/bcm/cipher.c rctx->total_todo = req->cryptlen;
rctx 2720 drivers/crypto/bcm/cipher.c rctx->src_sent = 0;
rctx 2721 drivers/crypto/bcm/cipher.c rctx->total_sent = 0;
rctx 2722 drivers/crypto/bcm/cipher.c rctx->total_received = 0;
rctx 2723 drivers/crypto/bcm/cipher.c rctx->is_sw_hmac = false;
rctx 2724 drivers/crypto/bcm/cipher.c rctx->ctx = ctx;
rctx 2725 drivers/crypto/bcm/cipher.c memset(&rctx->mb_mssg, 0, sizeof(struct brcm_message));
rctx 2728 drivers/crypto/bcm/cipher.c rctx->assoc = req->src;
rctx 2735 drivers/crypto/bcm/cipher.c if (spu_sg_at_offset(req->src, req->assoclen, &rctx->src_sg,
rctx 2736 drivers/crypto/bcm/cipher.c &rctx->src_skip) < 0) {
rctx 2742 drivers/crypto/bcm/cipher.c rctx->src_nents = 0;
rctx 2743 drivers/crypto/bcm/cipher.c rctx->dst_nents = 0;
rctx 2745 drivers/crypto/bcm/cipher.c rctx->dst_sg = rctx->src_sg;
rctx 2746 drivers/crypto/bcm/cipher.c rctx->dst_skip = rctx->src_skip;
rctx 2753 drivers/crypto/bcm/cipher.c if (spu_sg_at_offset(req->dst, req->assoclen, &rctx->dst_sg,
rctx 2754 drivers/crypto/bcm/cipher.c &rctx->dst_skip) < 0) {
rctx 2766 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len =
rctx 2770 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len = CCM_AES_IV_SIZE;
rctx 2772 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len = 0;
rctx 2775 drivers/crypto/bcm/cipher.c rctx->hash_carry_len = 0;
rctx 2779 drivers/crypto/bcm/cipher.c rctx->src_sg, rctx->src_skip);
rctx 2780 drivers/crypto/bcm/cipher.c flow_log("  assoc: %p, assoclen %u\n", rctx->assoc, req->assoclen);
rctx 2783 drivers/crypto/bcm/cipher.c rctx->dst_sg, rctx->dst_skip);
rctx 2784 drivers/crypto/bcm/cipher.c flow_log("  iv_ctr_len:%u\n", rctx->iv_ctr_len);
rctx 2785 drivers/crypto/bcm/cipher.c flow_dump("  iv: ", req->iv, rctx->iv_ctr_len);
rctx 2801 drivers/crypto/bcm/cipher.c if (rctx->iv_ctr_len) {
rctx 2803 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.iv_ctr + ctx->salt_offset,
rctx 2805 drivers/crypto/bcm/cipher.c memcpy(rctx->msg_buf.iv_ctr + ctx->salt_offset + ctx->salt_len,
rctx 2807 drivers/crypto/bcm/cipher.c rctx->iv_ctr_len - ctx->salt_len - ctx->salt_offset);
rctx 2810 drivers/crypto/bcm/cipher.c rctx->chan_idx = select_channel();
rctx 2811 drivers/crypto/bcm/cipher.c err = handle_aead_req(rctx);
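
Many of the bcm/cipher.c references above belong to one chunking state machine: the mailbox callback compares rctx->total_sent with rctx->total_todo and either submits the next chunk or completes the request. A hedged, self-contained sketch of just that control flow, with demo_* stubs standing in for the driver's real helpers:

	#include <linux/errno.h>

	struct demo_reqctx {
		unsigned int total_sent;	/* bytes handed to hardware so far */
		unsigned int total_todo;	/* bytes in the whole request */
	};

	static void demo_chunk_cleanup(struct demo_reqctx *rctx) { /* free chunk bufs */ }
	static int demo_submit_next_chunk(struct demo_reqctx *rctx) { return -EINPROGRESS; }
	static void demo_finish_req(struct demo_reqctx *rctx, int err) { /* complete req */ }

	static void demo_rx_callback(struct demo_reqctx *rctx)
	{
		int err = 0;

		if (rctx->total_sent < rctx->total_todo) {
			/* More data remains: recycle this chunk, send the next. */
			demo_chunk_cleanup(rctx);
			err = demo_submit_next_chunk(rctx);
			if (err == -EINPROGRESS)
				return;		/* next chunk now in flight */
		}
		demo_finish_req(rctx, err);
	}
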
rctx 2811 drivers/crypto/bcm/cipher.c err = handle_aead_req(rctx);
rctx 2814 drivers/crypto/bcm/cipher.c spu_chunk_cleanup(rctx);
rctx 100 drivers/crypto/cavium/cpt/cptvf_algs.c struct cvm_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 101 drivers/crypto/cavium/cpt/cptvf_algs.c struct fc_context *fctx = &rctx->fctx;
rctx 102 drivers/crypto/cavium/cpt/cptvf_algs.c u64 *offset_control = &rctx->control_word;
rctx 104 drivers/crypto/cavium/cpt/cptvf_algs.c struct cpt_request_info *req_info = &rctx->cpt_req;
rctx 153 drivers/crypto/cavium/cpt/cptvf_algs.c struct cvm_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 154 drivers/crypto/cavium/cpt/cptvf_algs.c struct cpt_request_info *req_info = &rctx->cpt_req;
rctx 175 drivers/crypto/cavium/cpt/cptvf_algs.c struct cvm_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 176 drivers/crypto/cavium/cpt/cptvf_algs.c struct cpt_request_info *req_info = &rctx->cpt_req;
rctx 195 drivers/crypto/cavium/cpt/cptvf_algs.c struct cvm_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 197 drivers/crypto/cavium/cpt/cptvf_algs.c struct fc_context *fctx = &rctx->fctx;
rctx 198 drivers/crypto/cavium/cpt/cptvf_algs.c struct cpt_request_info *req_info = &rctx->cpt_req;
rctx 135 drivers/crypto/cavium/nitrox/nitrox_aead.c static int nitrox_set_creq(struct nitrox_aead_rctx *rctx)
rctx 137 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
rctx 141 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->flags = rctx->flags;
rctx 142 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gfp = (rctx->flags & CRYPTO_TFM_REQ_MAY_SLEEP) ? GFP_KERNEL :
rctx 147 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctrl.s.arg = rctx->ctrl_arg;
rctx 149 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param0 = cpu_to_be16(rctx->cryptlen);
rctx 150 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param1 = cpu_to_be16(rctx->cryptlen + rctx->assoclen);
rctx 151 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->gph.param2 = cpu_to_be16(rctx->ivsize + rctx->assoclen);
rctx 153 drivers/crypto/cavium/nitrox/nitrox_aead.c param3.auth_offset = rctx->ivsize;
rctx 156 drivers/crypto/cavium/nitrox/nitrox_aead.c creq->ctx_handle = rctx->ctx_handle;
rctx 159 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = alloc_src_sglist(&rctx->nkreq, rctx->src, rctx->iv, rctx->ivsize,
rctx 160 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen);
rctx 164 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = alloc_dst_sglist(&rctx->nkreq, rctx->dst, rctx->ivsize,
rctx 165 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dstlen);
rctx 167 drivers/crypto/cavium/nitrox/nitrox_aead.c free_src_sglist(&rctx->nkreq);
rctx 177 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
rctx 179 drivers/crypto/cavium/nitrox/nitrox_aead.c free_src_sglist(&rctx->nkreq);
rctx 180 drivers/crypto/cavium/nitrox/nitrox_aead.c free_dst_sglist(&rctx->nkreq);
rctx 193 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
rctx 194 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
rctx 200 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen;
rctx 201 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen;
rctx 202 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->assoclen + areq->cryptlen;
rctx 203 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dstlen = rctx->srclen + aead->authsize;
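The nitrox entries at lines 200-203 (encrypt) and 231-234 below (decrypt) show the usual AEAD length bookkeeping: the engine reads assoc data plus payload, and the tag either grows the output or is consumed from the input. A small sketch of that arithmetic, factored out under assumed names:

    /* Illustrative helper, not driver code: derive engine I/O lengths
     * from the AEAD request. On decrypt, areq->cryptlen already
     * includes the authsize-byte tag. */
    static void example_aead_lens(bool enc, unsigned int assoclen,
    			      unsigned int cryptlen, unsigned int authsize,
    			      unsigned int *srclen, unsigned int *dstlen)
    {
    	*srclen = assoclen + cryptlen;		/* bytes the engine reads */
    	*dstlen = enc ? *srclen + authsize	/* ciphertext + appended tag */
    		      : *srclen - authsize;	/* plaintext, tag verified+dropped */
    }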
rctx 204 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
rctx 205 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
rctx 206 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->flags = areq->base.flags;
rctx 207 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ctx_handle = nctx->u.ctx_handle;
rctx 208 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->src = areq->src;
rctx 209 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst;
rctx 210 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ctrl_arg = ENCRYPT;
rctx 211 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = nitrox_set_creq(rctx);
rctx 224 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *rctx = aead_request_ctx(areq);
rctx 225 drivers/crypto/cavium/nitrox/nitrox_aead.c struct se_crypto_request *creq = &rctx->nkreq.creq;
rctx 231 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->cryptlen = areq->cryptlen - aead->authsize;
rctx 232 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->assoclen = areq->assoclen;
rctx 233 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->srclen = areq->cryptlen + areq->assoclen;
rctx 234 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dstlen = rctx->srclen - aead->authsize;
rctx 235 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->iv = &areq->iv[GCM_AES_SALT_SIZE];
rctx 236 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ivsize = GCM_AES_IV_SIZE - GCM_AES_SALT_SIZE;
rctx 237 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->flags = areq->base.flags;
rctx 238 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ctx_handle = nctx->u.ctx_handle;
rctx 239 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->src = areq->src;
rctx 240 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->dst = areq->dst;
rctx 241 drivers/crypto/cavium/nitrox/nitrox_aead.c rctx->ctrl_arg = DECRYPT;
rctx 242 drivers/crypto/cavium/nitrox/nitrox_aead.c ret = nitrox_set_creq(rctx);
rctx 365 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
rctx 366 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *aead_rctx = &rctx->base;
rctx 373 drivers/crypto/cavium/nitrox/nitrox_aead.c scatterwalk_map_and_copy(rctx->assoc, areq->src, 0, assoclen, 0);
rctx 374 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_init_table(rctx->src, 3);
rctx 375 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_set_buf(rctx->src, rctx->assoc, assoclen);
rctx 376 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
rctx 377 drivers/crypto/cavium/nitrox/nitrox_aead.c if (sg != rctx->src + 1)
rctx 378 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_chain(rctx->src, 2, sg);
rctx 381 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_init_table(rctx->dst, 3);
rctx 382 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_set_buf(rctx->dst, rctx->assoc, assoclen);
rctx 383 drivers/crypto/cavium/nitrox/nitrox_aead.c sg = scatterwalk_ffwd(rctx->dst + 1, areq->dst, areq->assoclen);
rctx 384 drivers/crypto/cavium/nitrox/nitrox_aead.c if (sg != rctx->dst + 1)
rctx 385 drivers/crypto/cavium/nitrox/nitrox_aead.c sg_chain(rctx->dst, 2, sg);
rctx 388 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->src = rctx->src;
rctx 389 drivers/crypto/cavium/nitrox/nitrox_aead.c aead_rctx->dst = (areq->src == areq->dst) ? rctx->src : rctx->dst;
rctx 397 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
rctx 398 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_kcrypt_request *nkreq = &rctx->base.nkreq;
rctx 414 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
rctx 415 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *aead_rctx = &rctx->base;
rctx 446 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_rfc4106_rctx *rctx = aead_request_ctx(areq);
rctx 447 drivers/crypto/cavium/nitrox/nitrox_aead.c struct nitrox_aead_rctx *aead_rctx = &rctx->base;
rctx 28 drivers/crypto/ccp/ccp-crypto-aes-cmac.c struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
rctx 34 drivers/crypto/ccp/ccp-crypto-aes-cmac.c if (rctx->hash_rem) {
rctx 36 drivers/crypto/ccp/ccp-crypto-aes-cmac.c unsigned int offset = rctx->nbytes - rctx->hash_rem;
rctx 38 drivers/crypto/ccp/ccp-crypto-aes-cmac.c scatterwalk_map_and_copy(rctx->buf, rctx->src,
rctx 39 drivers/crypto/ccp/ccp-crypto-aes-cmac.c offset, rctx->hash_rem, 0);
rctx 40 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->buf_count = rctx->hash_rem;
rctx 42 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->buf_count = 0;
rctx 46 drivers/crypto/ccp/ccp-crypto-aes-cmac.c if (req->result && rctx->final)
rctx 47 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memcpy(req->result, rctx->iv, digest_size);
rctx 50 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg_free_table(&rctx->data_sg);
rctx 60 drivers/crypto/ccp/ccp-crypto-aes-cmac.c struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
rctx 73 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->null_msg = 0;
rctx 75 drivers/crypto/ccp/ccp-crypto-aes-cmac.c len = (u64)rctx->buf_count + (u64)nbytes;
rctx 78 drivers/crypto/ccp/ccp-crypto-aes-cmac.c scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,
rctx 80 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->buf_count += nbytes;
rctx 85 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->src = req->src;
rctx 86 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->nbytes = nbytes;
rctx 88 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->final = final;
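The rfc4106 scatterlist assembly at nitrox_aead.c:374-378 (the same shape appears in crypto/ccm.c) is worth reading slowly; an annotated copy of the pattern, comments added here:

    /* Build a 3-slot table: slot 0 is the locally copied assoc data,
     * slots 1-2 splice in the payload that follows assoclen bytes. */
    sg_init_table(rctx->src, 3);
    sg_set_buf(rctx->src, rctx->assoc, assoclen);
    /* scatterwalk_ffwd() either fills rctx->src + 1 and returns it, or
     * returns a pointer straight into areq->src when the offset happens
     * to land on an entry boundary; only that second case needs an
     * explicit chain link from our table to the original list. */
    sg = scatterwalk_ffwd(rctx->src + 1, areq->src, areq->assoclen);
    if (sg != rctx->src + 1)
    	sg_chain(rctx->src, 2, sg);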
rctx 89 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->hash_rem = final ? 0 : len & (block_size - 1);
rctx 90 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->hash_cnt = len - rctx->hash_rem;
rctx 91 drivers/crypto/ccp/ccp-crypto-aes-cmac.c if (!final && !rctx->hash_rem) {
rctx 93 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->hash_cnt -= block_size;
rctx 94 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->hash_rem = block_size;
rctx 97 drivers/crypto/ccp/ccp-crypto-aes-cmac.c if (final && (rctx->null_msg || (len & (block_size - 1))))
rctx 102 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg_init_one(&rctx->iv_sg, rctx->iv, sizeof(rctx->iv));
rctx 110 drivers/crypto/ccp/ccp-crypto-aes-cmac.c ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);
rctx 115 drivers/crypto/ccp/ccp-crypto-aes-cmac.c if (rctx->buf_count) {
rctx 116 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);
rctx 117 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
rctx 125 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
rctx 135 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->hash_cnt += pad_length;
rctx 137 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memset(rctx->pad, 0, sizeof(rctx->pad));
rctx 138 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->pad[0] = 0x80;
rctx 139 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg_init_one(&rctx->pad_sg, rctx->pad, pad_length);
rctx 140 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->pad_sg);
rctx 148 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg = rctx->data_sg.sgl;
rctx 156 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 157 drivers/crypto/ccp/ccp-crypto-aes-cmac.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 158 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.engine = CCP_ENGINE_AES;
rctx 159 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.type = ctx->u.aes.type;
rctx 160 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.mode = ctx->u.aes.mode;
rctx 161 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.action = CCP_AES_ACTION_ENCRYPT;
rctx 162 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
rctx 163 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
rctx 164 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.iv = &rctx->iv_sg;
rctx 165 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.iv_len = AES_BLOCK_SIZE;
rctx 166 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.src = sg;
rctx 167 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.src_len = rctx->hash_cnt;
rctx 168 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.dst = NULL;
rctx 169 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.cmac_key = cmac_key_sg;
rctx 170 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.cmac_key_len = ctx->u.aes.kn_len;
rctx 171 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->cmd.u.aes.cmac_final = final;
rctx 173 drivers/crypto/ccp/ccp-crypto-aes-cmac.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 178 drivers/crypto/ccp/ccp-crypto-aes-cmac.c sg_free_table(&rctx->data_sg);
rctx 185 drivers/crypto/ccp/ccp-crypto-aes-cmac.c struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
rctx 187 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memset(rctx, 0, sizeof(*rctx));
rctx 189 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->null_msg = 1;
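The split logic at ccp-crypto-aes-cmac.c:89-94 (mirrored in ccp-crypto-sha.c:84-89 below) decides how much of the buffered-plus-new input goes to hardware now and how much is carried in rctx->buf for the next call. Restated with comments:

    /* hash_cnt is submitted now; hash_rem stays buffered. When the
     * running total is exactly block-aligned mid-stream, hold back one
     * full block so a later final() still has data to pad and finish. */
    len = (u64)rctx->buf_count + (u64)nbytes;
    rctx->hash_rem = final ? 0 : len & (block_size - 1);
    rctx->hash_cnt = len - rctx->hash_rem;
    if (!final && !rctx->hash_rem) {
    	/* exactly block-aligned: defer one block */
    	rctx->hash_cnt -= block_size;
    	rctx->hash_rem = block_size;
    }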
rctx 222 drivers/crypto/ccp/ccp-crypto-aes-cmac.c struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
rctx 228 drivers/crypto/ccp/ccp-crypto-aes-cmac.c state.null_msg = rctx->null_msg;
rctx 229 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memcpy(state.iv, rctx->iv, sizeof(state.iv));
rctx 230 drivers/crypto/ccp/ccp-crypto-aes-cmac.c state.buf_count = rctx->buf_count;
rctx 231 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memcpy(state.buf, rctx->buf, sizeof(state.buf));
rctx 241 drivers/crypto/ccp/ccp-crypto-aes-cmac.c struct ccp_aes_cmac_req_ctx *rctx = ahash_request_ctx(req);
rctx 247 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memset(rctx, 0, sizeof(*rctx));
rctx 248 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->null_msg = state.null_msg;
rctx 249 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memcpy(rctx->iv, state.iv, sizeof(rctx->iv));
rctx 250 drivers/crypto/ccp/ccp-crypto-aes-cmac.c rctx->buf_count = state.buf_count;
rctx 251 drivers/crypto/ccp/ccp-crypto-aes-cmac.c memcpy(rctx->buf, state.buf, sizeof(rctx->buf));
rctx 81 drivers/crypto/ccp/ccp-crypto-aes-galois.c struct ccp_aes_req_ctx *rctx = aead_request_ctx(req);
rctx 106 drivers/crypto/ccp/ccp-crypto-aes-galois.c memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
rctx 108 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->iv[i + GCM_AES_IV_SIZE] = 0;
rctx 109 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->iv[AES_BLOCK_SIZE - 1] = 1;
rctx 112 drivers/crypto/ccp/ccp-crypto-aes-galois.c iv_sg = &rctx->iv_sg;
rctx 114 drivers/crypto/ccp/ccp-crypto-aes-galois.c sg_init_one(iv_sg, rctx->iv, iv_len);
rctx 117 drivers/crypto/ccp/ccp-crypto-aes-galois.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 118 drivers/crypto/ccp/ccp-crypto-aes-galois.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 119 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.engine = CCP_ENGINE_AES;
rctx 120 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.authsize = crypto_aead_authsize(tfm);
rctx 121 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.type = ctx->u.aes.type;
rctx 122 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.mode = ctx->u.aes.mode;
rctx 123 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.action = encrypt;
rctx 124 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
rctx 125 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
rctx 126 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.iv = iv_sg;
rctx 127 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.iv_len = iv_len;
rctx 128 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.src = req->src;
rctx 129 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.src_len = req->cryptlen;
rctx 130 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.aad_len = req->assoclen;
rctx 133 drivers/crypto/ccp/ccp-crypto-aes-galois.c rctx->cmd.u.aes.dst = req->dst;
rctx 135 drivers/crypto/ccp/ccp-crypto-aes-galois.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 65 drivers/crypto/ccp/ccp-crypto-aes-xts.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 70 drivers/crypto/ccp/ccp-crypto-aes-xts.c memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
rctx 109 drivers/crypto/ccp/ccp-crypto-aes-xts.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 165 drivers/crypto/ccp/ccp-crypto-aes-xts.c memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);
rctx 166 drivers/crypto/ccp/ccp-crypto-aes-xts.c sg_init_one(&rctx->iv_sg, rctx->iv, AES_BLOCK_SIZE);
rctx 168 drivers/crypto/ccp/ccp-crypto-aes-xts.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
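Across the ccp files here (cmac:156-173, galois:117-135, and the xts/aes/des3/rsa blocks that follow) the submission shape is identical: zero the embedded command, describe the whole operation through rctx->cmd, then enqueue. A condensed, commented rendering of that recurring shape (AES variant shown; field values are the ones visible in the listing):

    /* Describe the operation entirely in the per-request command... */
    memset(&rctx->cmd, 0, sizeof(rctx->cmd));
    INIT_LIST_HEAD(&rctx->cmd.entry);
    rctx->cmd.engine = CCP_ENGINE_AES;		/* which CCP engine runs it */
    rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;	/* key and IV as scatterlists */
    rctx->cmd.u.aes.iv = &rctx->iv_sg;
    rctx->cmd.u.aes.src = req->src;		/* data in */
    rctx->cmd.u.aes.dst = req->dst;		/* data out */
    /* ...then hand it off; completion arrives via the request callback. */
    ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);

Keeping the command inside the request context is what lets the driver queue many requests without any per-submission allocation.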
rctx 169 drivers/crypto/ccp/ccp-crypto-aes-xts.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 170 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.engine = CCP_ENGINE_XTS_AES_128;
rctx 171 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.type = CCP_AES_TYPE_128;
rctx 172 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.action = (encrypt) ? CCP_AES_ACTION_ENCRYPT
rctx 174 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.unit_size = unit_size;
rctx 175 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.key = &ctx->u.aes.key_sg;
rctx 176 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.key_len = ctx->u.aes.key_len;
rctx 177 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.iv = &rctx->iv_sg;
rctx 178 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.iv_len = AES_BLOCK_SIZE;
rctx 179 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.src = req->src;
rctx 180 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.src_len = req->nbytes;
rctx 181 drivers/crypto/ccp/ccp-crypto-aes-xts.c rctx->cmd.u.xts.dst = req->dst;
rctx 183 drivers/crypto/ccp/ccp-crypto-aes-xts.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 26 drivers/crypto/ccp/ccp-crypto-aes.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 32 drivers/crypto/ccp/ccp-crypto-aes.c memcpy(req->info, rctx->iv, AES_BLOCK_SIZE);
rctx 70 drivers/crypto/ccp/ccp-crypto-aes.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 87 drivers/crypto/ccp/ccp-crypto-aes.c memcpy(rctx->iv, req->info, AES_BLOCK_SIZE);
rctx 88 drivers/crypto/ccp/ccp-crypto-aes.c iv_sg = &rctx->iv_sg;
rctx 90 drivers/crypto/ccp/ccp-crypto-aes.c sg_init_one(iv_sg, rctx->iv, iv_len);
rctx 93 drivers/crypto/ccp/ccp-crypto-aes.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 94 drivers/crypto/ccp/ccp-crypto-aes.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 95 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.engine = CCP_ENGINE_AES;
rctx 96 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.type = ctx->u.aes.type;
rctx 97 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.mode = ctx->u.aes.mode;
rctx 98 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.action =
rctx 100 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.key = &ctx->u.aes.key_sg;
rctx 101 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.key_len = ctx->u.aes.key_len;
rctx 102 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.iv = iv_sg;
rctx 103 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.iv_len = iv_len;
rctx 104 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.src = req->src;
rctx 105 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.src_len = req->nbytes;
rctx 106 drivers/crypto/ccp/ccp-crypto-aes.c rctx->cmd.u.aes.dst = req->dst;
rctx 108 drivers/crypto/ccp/ccp-crypto-aes.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 143 drivers/crypto/ccp/ccp-crypto-aes.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 146 drivers/crypto/ccp/ccp-crypto-aes.c req->info = rctx->rfc3686_info;
rctx 168 drivers/crypto/ccp/ccp-crypto-aes.c struct ccp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 172 drivers/crypto/ccp/ccp-crypto-aes.c iv = rctx->rfc3686_iv;
rctx 182 drivers/crypto/ccp/ccp-crypto-aes.c rctx->rfc3686_info = req->info;
rctx 183 drivers/crypto/ccp/ccp-crypto-aes.c req->info = rctx->rfc3686_iv;
rctx 25 drivers/crypto/ccp/ccp-crypto-des3.c struct ccp_des3_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 31 drivers/crypto/ccp/ccp-crypto-des3.c memcpy(req->info, rctx->iv, DES3_EDE_BLOCK_SIZE);
rctx 64 drivers/crypto/ccp/ccp-crypto-des3.c struct ccp_des3_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 81 drivers/crypto/ccp/ccp-crypto-des3.c memcpy(rctx->iv, req->info, DES3_EDE_BLOCK_SIZE);
rctx 82 drivers/crypto/ccp/ccp-crypto-des3.c iv_sg = &rctx->iv_sg;
rctx 84 drivers/crypto/ccp/ccp-crypto-des3.c sg_init_one(iv_sg, rctx->iv, iv_len);
rctx 87 drivers/crypto/ccp/ccp-crypto-des3.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 88 drivers/crypto/ccp/ccp-crypto-des3.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 89 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.engine = CCP_ENGINE_DES3;
rctx 90 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.type = ctx->u.des3.type;
rctx 91 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.mode = ctx->u.des3.mode;
rctx 92 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.action = (encrypt)
rctx 95 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.key = &ctx->u.des3.key_sg;
rctx 96 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.key_len = ctx->u.des3.key_len;
rctx 97 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.iv = iv_sg;
rctx 98 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.iv_len = iv_len;
rctx 99 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.src = req->src;
rctx 100 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.src_len = req->nbytes;
rctx 101 drivers/crypto/ccp/ccp-crypto-des3.c rctx->cmd.u.des3.dst = req->dst;
rctx 103 drivers/crypto/ccp/ccp-crypto-des3.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 47 drivers/crypto/ccp/ccp-crypto-rsa.c struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx(req);
rctx 52 drivers/crypto/ccp/ccp-crypto-rsa.c req->dst_len = rctx->cmd.u.rsa.key_size >> 3;
rctx 68 drivers/crypto/ccp/ccp-crypto-rsa.c struct ccp_rsa_req_ctx *rctx = akcipher_request_ctx(req);
rctx 71 drivers/crypto/ccp/ccp-crypto-rsa.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 72 drivers/crypto/ccp/ccp-crypto-rsa.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 73 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.engine = CCP_ENGINE_RSA;
rctx 75 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.key_size = ctx->u.rsa.key_len; /* in bits */
rctx 77 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.exp = &ctx->u.rsa.e_sg;
rctx 78 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.exp_len = ctx->u.rsa.e_len;
rctx 80 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.exp = &ctx->u.rsa.d_sg;
rctx 81 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.exp_len = ctx->u.rsa.d_len;
rctx 83 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.mod = &ctx->u.rsa.n_sg;
rctx 84 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.mod_len = ctx->u.rsa.n_len;
rctx 85 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.src = req->src;
rctx 86 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.src_len = req->src_len;
rctx 87 drivers/crypto/ccp/ccp-crypto-rsa.c rctx->cmd.u.rsa.dst = req->dst;
rctx 89 drivers/crypto/ccp/ccp-crypto-rsa.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 29 drivers/crypto/ccp/ccp-crypto-sha.c struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 35 drivers/crypto/ccp/ccp-crypto-sha.c if (rctx->hash_rem) {
rctx 37 drivers/crypto/ccp/ccp-crypto-sha.c unsigned int offset = rctx->nbytes - rctx->hash_rem;
rctx 39 drivers/crypto/ccp/ccp-crypto-sha.c scatterwalk_map_and_copy(rctx->buf, rctx->src,
rctx 40 drivers/crypto/ccp/ccp-crypto-sha.c offset, rctx->hash_rem, 0);
rctx 41 drivers/crypto/ccp/ccp-crypto-sha.c rctx->buf_count = rctx->hash_rem;
rctx 43 drivers/crypto/ccp/ccp-crypto-sha.c rctx->buf_count = 0;
rctx 47 drivers/crypto/ccp/ccp-crypto-sha.c if (req->result && rctx->final)
rctx 48 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(req->result, rctx->ctx, digest_size);
rctx 51 drivers/crypto/ccp/ccp-crypto-sha.c sg_free_table(&rctx->data_sg);
rctx 61 drivers/crypto/ccp/ccp-crypto-sha.c struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 70 drivers/crypto/ccp/ccp-crypto-sha.c len = (u64)rctx->buf_count + (u64)nbytes;
rctx 73 drivers/crypto/ccp/ccp-crypto-sha.c scatterwalk_map_and_copy(rctx->buf + rctx->buf_count, req->src,
rctx 75 drivers/crypto/ccp/ccp-crypto-sha.c rctx->buf_count += nbytes;
rctx 80 drivers/crypto/ccp/ccp-crypto-sha.c rctx->src = req->src;
rctx 81 drivers/crypto/ccp/ccp-crypto-sha.c rctx->nbytes = nbytes;
rctx 83 drivers/crypto/ccp/ccp-crypto-sha.c rctx->final = final;
rctx 84 drivers/crypto/ccp/ccp-crypto-sha.c rctx->hash_rem = final ? 0 : len & (block_size - 1);
rctx 85 drivers/crypto/ccp/ccp-crypto-sha.c rctx->hash_cnt = len - rctx->hash_rem;
rctx 86 drivers/crypto/ccp/ccp-crypto-sha.c if (!final && !rctx->hash_rem) {
rctx 88 drivers/crypto/ccp/ccp-crypto-sha.c rctx->hash_cnt -= block_size;
rctx 89 drivers/crypto/ccp/ccp-crypto-sha.c rctx->hash_rem = block_size;
rctx 93 drivers/crypto/ccp/ccp-crypto-sha.c sg_init_one(&rctx->ctx_sg, rctx->ctx, sizeof(rctx->ctx));
rctx 96 drivers/crypto/ccp/ccp-crypto-sha.c if (rctx->buf_count && nbytes) {
rctx 103 drivers/crypto/ccp/ccp-crypto-sha.c ret = sg_alloc_table(&rctx->data_sg, sg_count, gfp);
rctx 107 drivers/crypto/ccp/ccp-crypto-sha.c sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);
rctx 108 drivers/crypto/ccp/ccp-crypto-sha.c sg = ccp_crypto_sg_table_add(&rctx->data_sg, &rctx->buf_sg);
rctx 113 drivers/crypto/ccp/ccp-crypto-sha.c sg = ccp_crypto_sg_table_add(&rctx->data_sg, req->src);
rctx 120 drivers/crypto/ccp/ccp-crypto-sha.c sg = rctx->data_sg.sgl;
rctx 121 drivers/crypto/ccp/ccp-crypto-sha.c } else if (rctx->buf_count) {
rctx 122 drivers/crypto/ccp/ccp-crypto-sha.c sg_init_one(&rctx->buf_sg, rctx->buf, rctx->buf_count);
rctx 124 drivers/crypto/ccp/ccp-crypto-sha.c sg = &rctx->buf_sg;
rctx 129 drivers/crypto/ccp/ccp-crypto-sha.c rctx->msg_bits += (rctx->hash_cnt << 3); /* Total in bits */
rctx 131 drivers/crypto/ccp/ccp-crypto-sha.c memset(&rctx->cmd, 0, sizeof(rctx->cmd));
rctx 132 drivers/crypto/ccp/ccp-crypto-sha.c INIT_LIST_HEAD(&rctx->cmd.entry);
rctx 133 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.engine = CCP_ENGINE_SHA;
rctx 134 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.type = rctx->type;
rctx 135 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx = &rctx->ctx_sg;
rctx 137 drivers/crypto/ccp/ccp-crypto-sha.c switch (rctx->type) {
rctx 139 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx_len = SHA1_DIGEST_SIZE;
rctx 142 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx_len = SHA224_DIGEST_SIZE;
rctx 145 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx_len = SHA256_DIGEST_SIZE;
rctx 148 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx_len = SHA384_DIGEST_SIZE;
rctx 151 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.ctx_len = SHA512_DIGEST_SIZE;
rctx 158 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.src = sg;
rctx 159 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.src_len = rctx->hash_cnt;
rctx 160 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.opad = ctx->u.sha.key_len ?
rctx 162 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.opad_len = ctx->u.sha.key_len ?
rctx 164 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.first = rctx->first;
rctx 165 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.final = rctx->final;
rctx 166 drivers/crypto/ccp/ccp-crypto-sha.c rctx->cmd.u.sha.msg_bits = rctx->msg_bits;
rctx 168 drivers/crypto/ccp/ccp-crypto-sha.c rctx->first = 0;
rctx 170 drivers/crypto/ccp/ccp-crypto-sha.c ret = ccp_crypto_enqueue_request(&req->base, &rctx->cmd);
rctx 175 drivers/crypto/ccp/ccp-crypto-sha.c sg_free_table(&rctx->data_sg);
rctx 184 drivers/crypto/ccp/ccp-crypto-sha.c struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 190 drivers/crypto/ccp/ccp-crypto-sha.c memset(rctx, 0, sizeof(*rctx));
rctx 192 drivers/crypto/ccp/ccp-crypto-sha.c rctx->type = alg->type;
rctx 193 drivers/crypto/ccp/ccp-crypto-sha.c rctx->first = 1;
rctx 197 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(rctx->buf, ctx->u.sha.ipad, block_size);
rctx 198 drivers/crypto/ccp/ccp-crypto-sha.c rctx->buf_count = block_size;
rctx 232 drivers/crypto/ccp/ccp-crypto-sha.c struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 238 drivers/crypto/ccp/ccp-crypto-sha.c state.type = rctx->type;
rctx 239 drivers/crypto/ccp/ccp-crypto-sha.c state.msg_bits = rctx->msg_bits;
rctx 240 drivers/crypto/ccp/ccp-crypto-sha.c state.first = rctx->first;
rctx 241 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(state.ctx, rctx->ctx, sizeof(state.ctx));
rctx 242 drivers/crypto/ccp/ccp-crypto-sha.c state.buf_count = rctx->buf_count;
rctx 243 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(state.buf, rctx->buf, sizeof(state.buf));
rctx 253 drivers/crypto/ccp/ccp-crypto-sha.c struct ccp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 259 drivers/crypto/ccp/ccp-crypto-sha.c memset(rctx, 0, sizeof(*rctx));
rctx 260 drivers/crypto/ccp/ccp-crypto-sha.c rctx->type = state.type;
rctx 261 drivers/crypto/ccp/ccp-crypto-sha.c rctx->msg_bits = state.msg_bits;
rctx 262 drivers/crypto/ccp/ccp-crypto-sha.c rctx->first = state.first;
rctx 263 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(rctx->ctx, state.ctx, sizeof(rctx->ctx));
rctx 264 drivers/crypto/ccp/ccp-crypto-sha.c rctx->buf_count = state.buf_count;
rctx 265 drivers/crypto/ccp/ccp-crypto-sha.c memcpy(rctx->buf, state.buf, sizeof(rctx->buf));
rctx 1112 drivers/crypto/hifn_795x.c struct hifn_context *ctx, struct hifn_request_context *rctx,
rctx 1124 drivers/crypto/hifn_795x.c switch (rctx->op) {
rctx 1141 drivers/crypto/hifn_795x.c if (rctx->op == ACRYPTO_OP_ENCRYPT || rctx->op == ACRYPTO_OP_DECRYPT) {
rctx 1146 drivers/crypto/hifn_795x.c if (rctx->iv && rctx->mode != ACRYPTO_MODE_ECB)
rctx 1149 drivers/crypto/hifn_795x.c switch (rctx->mode) {
rctx 1166 drivers/crypto/hifn_795x.c switch (rctx->type) {
rctx 1201 drivers/crypto/hifn_795x.c rctx->iv, rctx->ivsize, md);
rctx 1318 drivers/crypto/hifn_795x.c struct hifn_context *ctx, struct hifn_request_context *rctx,
rctx 1339 drivers/crypto/hifn_795x.c t = &rctx->walk.cache[0];
rctx 1342 drivers/crypto/hifn_795x.c if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
rctx 1362 drivers/crypto/hifn_795x.c hifn_setup_cmd_desc(dev, ctx, rctx, priv, nbytes);
rctx 1524 drivers/crypto/hifn_795x.c struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
rctx 1531 drivers/crypto/hifn_795x.c if (rctx->iv && !rctx->ivsize && rctx->mode != ACRYPTO_MODE_ECB)
rctx 1534 drivers/crypto/hifn_795x.c rctx->walk.flags = 0;
rctx 1542 drivers/crypto/hifn_795x.c rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
rctx 1548 drivers/crypto/hifn_795x.c if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
rctx 1549 drivers/crypto/hifn_795x.c err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);
rctx 1554 drivers/crypto/hifn_795x.c sg_num = hifn_cipher_walk(req, &rctx->walk);
rctx 1566 drivers/crypto/hifn_795x.c err = hifn_setup_dma(dev, ctx, rctx, req->src, req->dst, req->nbytes, req);
rctx 1583 drivers/crypto/hifn_795x.c rctx->iv, rctx->ivsize,
rctx 1585 drivers/crypto/hifn_795x.c rctx->mode, rctx->op, rctx->type, err);
rctx 1665 drivers/crypto/hifn_795x.c struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
rctx 1667 drivers/crypto/hifn_795x.c if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
rctx 1674 drivers/crypto/hifn_795x.c t = &rctx->walk.cache[idx];
rctx 1702 drivers/crypto/hifn_795x.c hifn_cipher_walk_exit(&rctx->walk);
rctx 2001 drivers/crypto/hifn_795x.c struct hifn_request_context *rctx = ablkcipher_request_ctx(req);
rctx 2022 drivers/crypto/hifn_795x.c rctx->op = op;
rctx 2023 drivers/crypto/hifn_795x.c rctx->mode = mode;
rctx 2024 drivers/crypto/hifn_795x.c rctx->type = type;
rctx 2025 drivers/crypto/hifn_795x.c rctx->iv = req->info;
rctx 2026 drivers/crypto/hifn_795x.c rctx->ivsize = ivsize;
rctx 484 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 487 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 488 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 491 drivers/crypto/img-hash.c return crypto_ahash_init(&rctx->fallback_req);
rctx 547 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 551 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 552 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 554 drivers/crypto/img-hash.c rctx->fallback_req.nbytes = req->nbytes;
rctx 555 drivers/crypto/img-hash.c rctx->fallback_req.src = req->src;
rctx 557 drivers/crypto/img-hash.c return crypto_ahash_update(&rctx->fallback_req);
rctx 562 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 566 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 567 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 569 drivers/crypto/img-hash.c rctx->fallback_req.result = req->result;
rctx 571 drivers/crypto/img-hash.c return crypto_ahash_final(&rctx->fallback_req);
rctx 576 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 580 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 581 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 583 drivers/crypto/img-hash.c rctx->fallback_req.nbytes = req->nbytes;
rctx 584 drivers/crypto/img-hash.c rctx->fallback_req.src = req->src;
rctx 585 drivers/crypto/img-hash.c rctx->fallback_req.result = req->result;
rctx 587 drivers/crypto/img-hash.c return crypto_ahash_finup(&rctx->fallback_req);
rctx 592 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 596 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 597 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 600 drivers/crypto/img-hash.c return crypto_ahash_import(&rctx->fallback_req, in);
rctx 605 drivers/crypto/img-hash.c struct img_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 609 drivers/crypto/img-hash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback);
rctx 610 drivers/crypto/img-hash.c rctx->fallback_req.base.flags = req->base.flags
rctx 613 drivers/crypto/img-hash.c return crypto_ahash_export(&rctx->fallback_req, out);
rctx 520 drivers/crypto/inside-secure/safexcel_hash.c struct safexcel_ahash_req *rctx = ahash_request_ctx(req);
rctx 534 drivers/crypto/inside-secure/safexcel_hash.c rctx->needs_inv = true;
rctx 223 drivers/crypto/mediatek/mtk-aes.c const struct mtk_aes_reqctx *rctx)
rctx 226 drivers/crypto/mediatek/mtk-aes.c aes->flags = (aes->flags & AES_FLAGS_BUSY) | rctx->mode;
rctx 556 drivers/crypto/mediatek/mtk-aes.c struct mtk_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 558 drivers/crypto/mediatek/mtk-aes.c mtk_aes_set_mode(aes, rctx);
rctx 624 drivers/crypto/mediatek/mtk-aes.c struct mtk_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 626 drivers/crypto/mediatek/mtk-aes.c mtk_aes_set_mode(aes, rctx);
rctx 668 drivers/crypto/mediatek/mtk-aes.c struct mtk_aes_reqctx *rctx;
rctx 675 drivers/crypto/mediatek/mtk-aes.c rctx = ablkcipher_request_ctx(req);
rctx 676 drivers/crypto/mediatek/mtk-aes.c rctx->mode = mode;
rctx 962 drivers/crypto/mediatek/mtk-aes.c struct mtk_aes_reqctx *rctx = aead_request_ctx(req);
rctx 965 drivers/crypto/mediatek/mtk-aes.c mtk_aes_set_mode(aes, rctx);
rctx 987 drivers/crypto/mediatek/mtk-aes.c struct mtk_aes_reqctx *rctx = aead_request_ctx(req);
rctx 1002 drivers/crypto/mediatek/mtk-aes.c rctx->mode = AES_FLAGS_GCM | mode;
rctx 219 drivers/crypto/mxs-dcp.c struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 244 drivers/crypto/mxs-dcp.c if (rctx->enc)
rctx 251 drivers/crypto/mxs-dcp.c if (rctx->ecb)
rctx 280 drivers/crypto/mxs-dcp.c struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 307 drivers/crypto/mxs-dcp.c if (!rctx->ecb) {
rctx 377 drivers/crypto/mxs-dcp.c if (!rctx->ecb) {
rctx 378 drivers/crypto/mxs-dcp.c if (rctx->enc)
rctx 453 drivers/crypto/mxs-dcp.c struct dcp_aes_req_ctx *rctx = ablkcipher_request_ctx(req);
rctx 459 drivers/crypto/mxs-dcp.c rctx->enc = enc;
rctx 460 drivers/crypto/mxs-dcp.c rctx->ecb = ecb;
rctx 561 drivers/crypto/mxs-dcp.c struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 572 drivers/crypto/mxs-dcp.c if (rctx->init)
rctx 586 drivers/crypto/mxs-dcp.c if (rctx->init && rctx->fini && desc->size == 0) {
rctx 597 drivers/crypto/mxs-dcp.c if (rctx->fini) {
rctx 606 drivers/crypto/mxs-dcp.c if (rctx->fini)
rctx 623 drivers/crypto/mxs-dcp.c struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 634 drivers/crypto/mxs-dcp.c int fin = rctx->fini;
rctx 636 drivers/crypto/mxs-dcp.c rctx->fini = 0;
rctx 663 drivers/crypto/mxs-dcp.c rctx->init = 0;
rctx 668 drivers/crypto/mxs-dcp.c rctx->fini = 1;
rctx 755 drivers/crypto/mxs-dcp.c struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 770 drivers/crypto/mxs-dcp.c rctx->fini = fini;
rctx 774 drivers/crypto/mxs-dcp.c rctx->init = 1;
rctx 817 drivers/crypto/mxs-dcp.c struct dcp_sha_req_ctx *rctx = ahash_request_ctx(req);
rctx 822 drivers/crypto/mxs-dcp.c memset(rctx, 0, sizeof(struct dcp_sha_req_ctx));
rctx 824 drivers/crypto/mxs-dcp.c memcpy(rctx, &export->req_ctx, sizeof(struct dcp_sha_req_ctx));
rctx 311 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 315 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 316 drivers/crypto/n2_core.c rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
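img-hash.c, n2_core.c above, and rk3288_crypto_ahash.c further down all share one fallback idiom: mirror the caller's request onto an embedded software-tfm request, forwarding only the MAY_SLEEP flag, and delegate. A hedged sketch of the finup case with illustrative context-type names:

    /* Sketch only; struct/field names are assumptions modeled on the
     * entries above, not any one driver's definitions. */
    static int example_fallback_finup(struct ahash_request *req)
    {
    	struct example_reqctx *rctx = ahash_request_ctx(req);
    	struct example_tfm_ctx *ctx = crypto_tfm_ctx(req->base.tfm);

    	ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
    	/* Propagate only the sleep permission; completion callbacks
    	 * stay with the outer request. */
    	rctx->fallback_req.base.flags =
    		req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
    	rctx->fallback_req.nbytes = req->nbytes;
    	rctx->fallback_req.src = req->src;
    	rctx->fallback_req.result = req->result;

    	return crypto_ahash_finup(&rctx->fallback_req);
    }

Embedding fallback_req in the request context is what makes this allocation-free per call.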
rctx 318 drivers/crypto/n2_core.c return crypto_ahash_init(&rctx->fallback_req);
rctx 323 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 327 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 328 drivers/crypto/n2_core.c rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 329 drivers/crypto/n2_core.c rctx->fallback_req.nbytes = req->nbytes;
rctx 330 drivers/crypto/n2_core.c rctx->fallback_req.src = req->src;
rctx 332 drivers/crypto/n2_core.c return crypto_ahash_update(&rctx->fallback_req);
rctx 337 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 341 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 342 drivers/crypto/n2_core.c rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 343 drivers/crypto/n2_core.c rctx->fallback_req.result = req->result;
rctx 345 drivers/crypto/n2_core.c return crypto_ahash_final(&rctx->fallback_req);
rctx 350 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 354 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 355 drivers/crypto/n2_core.c rctx->fallback_req.base.flags = req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP;
rctx 356 drivers/crypto/n2_core.c rctx->fallback_req.nbytes = req->nbytes;
rctx 357 drivers/crypto/n2_core.c rctx->fallback_req.src = req->src;
rctx 358 drivers/crypto/n2_core.c rctx->fallback_req.result = req->result;
rctx 360 drivers/crypto/n2_core.c return crypto_ahash_finup(&rctx->fallback_req);
rctx 537 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 540 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 541 drivers/crypto/n2_core.c rctx->fallback_req.base.flags =
rctx 543 drivers/crypto/n2_core.c rctx->fallback_req.nbytes = req->nbytes;
rctx 544 drivers/crypto/n2_core.c rctx->fallback_req.src = req->src;
rctx 545 drivers/crypto/n2_core.c rctx->fallback_req.result = req->result;
rctx 547 drivers/crypto/n2_core.c return crypto_ahash_digest(&rctx->fallback_req);
rctx 612 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 620 drivers/crypto/n2_core.c memcpy(&rctx->u, n2alg->hash_init, n2alg->hw_op_hashsz);
rctx 624 drivers/crypto/n2_core.c &rctx->u, 0UL, 0);
rctx 630 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 638 drivers/crypto/n2_core.c struct n2_hash_req_ctx *rctx = ahash_request_ctx(req);
rctx 641 drivers/crypto/n2_core.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 642 drivers/crypto/n2_core.c rctx->fallback_req.base.flags =
rctx 644 drivers/crypto/n2_core.c rctx->fallback_req.nbytes = req->nbytes;
rctx 645 drivers/crypto/n2_core.c rctx->fallback_req.src = req->src;
rctx 646 drivers/crypto/n2_core.c rctx->fallback_req.result = req->result;
rctx 648 drivers/crypto/n2_core.c return crypto_ahash_digest(&rctx->fallback_req);
rctx 650 drivers/crypto/n2_core.c memcpy(&rctx->u, n2alg->derived.hash_init,
rctx 655 drivers/crypto/n2_core.c &rctx->u,
rctx 882 drivers/crypto/n2_core.c struct n2_request_context *rctx = ablkcipher_request_ctx(req);
rctx 883 drivers/crypto/n2_core.c struct ablkcipher_walk *walk = &rctx->walk;
rctx 895 drivers/crypto/n2_core.c INIT_LIST_HEAD(&rctx->chunk_list);
rctx 897 drivers/crypto/n2_core.c chunk = &rctx->chunk;
rctx 928 drivers/crypto/n2_core.c &rctx->chunk_list);
rctx 955 drivers/crypto/n2_core.c list_add_tail(&chunk->entry, &rctx->chunk_list);
rctx 963 drivers/crypto/n2_core.c struct n2_request_context *rctx = ablkcipher_request_ctx(req);
rctx 967 drivers/crypto/n2_core.c memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize);
rctx 969 drivers/crypto/n2_core.c ablkcipher_walk_complete(&rctx->walk);
rctx 970 drivers/crypto/n2_core.c list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
rctx 972 drivers/crypto/n2_core.c if (unlikely(c != &rctx->chunk))
rctx 980 drivers/crypto/n2_core.c struct n2_request_context *rctx = ablkcipher_request_ctx(req);
rctx 997 drivers/crypto/n2_core.c list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
rctx 1002 drivers/crypto/n2_core.c if (unlikely(c != &rctx->chunk))
rctx 1032 drivers/crypto/n2_core.c struct n2_request_context *rctx = ablkcipher_request_ctx(req);
rctx 1053 drivers/crypto/n2_core.c iv_paddr = __pa(rctx->walk.iv);
rctx 1054 drivers/crypto/n2_core.c list_for_each_entry_safe(c, tmp, &rctx->chunk_list,
rctx 1060 drivers/crypto/n2_core.c iv_paddr = c->dest_final - rctx->walk.blocksize;
rctx 1062 drivers/crypto/n2_core.c if (unlikely(c != &rctx->chunk))
rctx 1067 drivers/crypto/n2_core.c list_for_each_entry_safe_reverse(c, tmp, &rctx->chunk_list,
rctx 1069 drivers/crypto/n2_core.c if (c == &rctx->chunk) {
rctx 1070 drivers/crypto/n2_core.c iv_paddr = __pa(rctx->walk.iv);
rctx 1074 drivers/crypto/n2_core.c rctx->walk.blocksize);
rctx 1081 drivers/crypto/n2_core.c rctx->walk.blocksize);
rctx 1082 drivers/crypto/n2_core.c final_iv_addr = rctx->temp_iv;
rctx 1083 drivers/crypto/n2_core.c memcpy(rctx->temp_iv, __va(pa),
rctx 1084 drivers/crypto/n2_core.c rctx->walk.blocksize);
rctx 1091 drivers/crypto/n2_core.c if (unlikely(c != &rctx->chunk))
rctx 483 drivers/crypto/nx/nx-aes-ccm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 485 drivers/crypto/nx/nx-aes-ccm.c u8 *iv = rctx->iv;
rctx 513 drivers/crypto/nx/nx-aes-ccm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 515 drivers/crypto/nx/nx-aes-ccm.c u8 *iv = rctx->iv;
rctx 314 drivers/crypto/nx/nx-aes-gcm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 324 drivers/crypto/nx/nx-aes-gcm.c desc.info = rctx->iv;
rctx 422 drivers/crypto/nx/nx-aes-gcm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 423 drivers/crypto/nx/nx-aes-gcm.c char *iv = rctx->iv;
rctx 432 drivers/crypto/nx/nx-aes-gcm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 433 drivers/crypto/nx/nx-aes-gcm.c char *iv = rctx->iv;
rctx 444 drivers/crypto/nx/nx-aes-gcm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 445 drivers/crypto/nx/nx-aes-gcm.c char *iv = rctx->iv;
rctx 461 drivers/crypto/nx/nx-aes-gcm.c struct nx_gcm_rctx *rctx = aead_request_ctx(req);
rctx 462 drivers/crypto/nx/nx-aes-gcm.c char *iv = rctx->iv;
rctx 43 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx;
rctx 47 drivers/crypto/omap-aes-gcm.c rctx = aead_request_ctx(dd->aead_req);
rctx 62 drivers/crypto/omap-aes-gcm.c scatterwalk_map_and_copy(rctx->auth_tag,
rctx 74 drivers/crypto/omap-aes-gcm.c tag = (u8 *)rctx->auth_tag;
rctx 225 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx;
rctx 234 drivers/crypto/omap-aes-gcm.c rctx = aead_request_ctx(dd->aead_req);
rctx 235 drivers/crypto/omap-aes-gcm.c auth_tag = (u32 *)rctx->auth_tag;
rctx 251 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx;
rctx 276 drivers/crypto/omap-aes-gcm.c rctx = aead_request_ctx(req);
rctx 279 drivers/crypto/omap-aes-gcm.c rctx->dd = dd;
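n2_core.c keeps the first cipher chunk embedded in the request context and kmalloc()s only overflow chunks, so every teardown walk must skip the embedded one; that is what the repeated `c != &rctx->chunk` tests above guard. A hedged condensation of that cleanup walk, assembled from the entries around n2_core.c:970-1002:

    /* Free the chunk list; the first chunk lives inside rctx itself
     * and must never be kfree()d. */
    list_for_each_entry_safe(c, tmp, &rctx->chunk_list, entry) {
    	list_del(&c->entry);
    	if (unlikely(c != &rctx->chunk))	/* heap-allocated overflow only */
    		kfree(c);
    }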
rctx 282 drivers/crypto/omap-aes-gcm.c rctx->mode &= FLAGS_MODE_MASK;
rctx 283 drivers/crypto/omap-aes-gcm.c dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;
rctx 303 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx = aead_request_ctx(req);
rctx 310 drivers/crypto/omap-aes-gcm.c memset(rctx->auth_tag, 0, sizeof(rctx->auth_tag));
rctx 311 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv + GCM_AES_IV_SIZE, &counter, 4);
rctx 313 drivers/crypto/omap-aes-gcm.c err = do_encrypt_iv(req, (u32 *)rctx->auth_tag, (u32 *)rctx->iv);
rctx 322 drivers/crypto/omap-aes-gcm.c scatterwalk_map_and_copy(rctx->auth_tag, req->dst, 0, authlen,
rctx 327 drivers/crypto/omap-aes-gcm.c dd = omap_aes_find_dev(rctx);
rctx 330 drivers/crypto/omap-aes-gcm.c rctx->mode = mode;
rctx 337 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx = aead_request_ctx(req);
rctx 339 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
rctx 345 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx = aead_request_ctx(req);
rctx 347 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv, req->iv, GCM_AES_IV_SIZE);
rctx 354 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx = aead_request_ctx(req);
rctx 356 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv, ctx->nonce, 4);
rctx 357 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv + 4, req->iv, 8);
rctx 365 drivers/crypto/omap-aes-gcm.c struct omap_aes_reqctx *rctx = aead_request_ctx(req);
rctx 367 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv, ctx->nonce, 4);
rctx 368 drivers/crypto/omap-aes-gcm.c memcpy(rctx->iv + 4, req->iv, 8);
rctx 124 drivers/crypto/omap-aes.c struct omap_aes_reqctx *rctx;
rctx 149 drivers/crypto/omap-aes.c rctx = aead_request_ctx(dd->aead_req);
rctx 150 drivers/crypto/omap-aes.c omap_aes_write_n(dd, AES_REG_IV(dd, 0), (u32 *)rctx->iv, 4);
rctx 209 drivers/crypto/omap-aes.c struct omap_aes_dev *omap_aes_find_dev(struct omap_aes_reqctx *rctx)
rctx 216 drivers/crypto/omap-aes.c rctx->dd = dd;
rctx 420 drivers/crypto/omap-aes.c struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 421 drivers/crypto/omap-aes.c struct omap_aes_dev *dd = rctx->dd;
rctx 460 drivers/crypto/omap-aes.c rctx->mode &= FLAGS_MODE_MASK;
rctx 461 drivers/crypto/omap-aes.c dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;
rctx 464 drivers/crypto/omap-aes.c rctx->dd = dd;
rctx 473 drivers/crypto/omap-aes.c struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 474 drivers/crypto/omap-aes.c struct omap_aes_dev *dd = rctx->dd;
rctx 512 drivers/crypto/omap-aes.c struct omap_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 537 drivers/crypto/omap-aes.c dd = omap_aes_find_dev(rctx);
rctx 541 drivers/crypto/omap-aes.c rctx->mode = mode;
rctx 198 drivers/crypto/omap-aes.h struct omap_aes_dev *omap_aes_find_dev(struct omap_aes_reqctx *rctx);
rctx 532 drivers/crypto/omap-des.c struct omap_des_reqctx *rctx;
rctx 571 drivers/crypto/omap-des.c rctx = ablkcipher_request_ctx(req);
rctx 573 drivers/crypto/omap-des.c rctx->mode &= FLAGS_MODE_MASK;
rctx 574 drivers/crypto/omap-des.c dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;
rctx 626 drivers/crypto/omap-des.c struct omap_des_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 642 drivers/crypto/omap-des.c rctx->mode = mode;
rctx 732 drivers/crypto/omap-sham.c struct omap_sham_reqctx *rctx)
rctx 739 drivers/crypto/omap-sham.c int offset = rctx->offset;
rctx 798 drivers/crypto/omap-sham.c return omap_sham_copy_sgs(rctx, sg, bs, new_len);
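The two-line idiom repeated at omap-aes-gcm.c:282-283, omap-aes.c:460-461 and omap-des.c:573-574 merges the request's mode into the shared device flags word without disturbing the device's other state bits:

    /* Keep only the mode bits of the request... */
    rctx->mode &= FLAGS_MODE_MASK;
    /* ...then clear the old mode in dd->flags and OR the new one in,
     * leaving all non-mode device flags untouched. */
    dd->flags = (dd->flags & ~FLAGS_MODE_MASK) | rctx->mode;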
rctx 800 drivers/crypto/omap-sham.c return omap_sham_copy_sg_lists(rctx, sg, bs, new_len);
rctx 802 drivers/crypto/omap-sham.c rctx->sg_len = n;
rctx 803 drivers/crypto/omap-sham.c rctx->sg = sg;
rctx 810 drivers/crypto/omap-sham.c struct omap_sham_reqctx *rctx = ahash_request_ctx(req);
rctx 814 drivers/crypto/omap-sham.c bool final = rctx->flags & BIT(FLAGS_FINUP);
rctx 817 drivers/crypto/omap-sham.c bs = get_block_size(rctx);
rctx 824 drivers/crypto/omap-sham.c rctx->total = nbytes + rctx->bufcnt;
rctx 826 drivers/crypto/omap-sham.c if (!rctx->total)
rctx 829 drivers/crypto/omap-sham.c if (nbytes && (!IS_ALIGNED(rctx->bufcnt, bs))) {
rctx 830 drivers/crypto/omap-sham.c int len = bs - rctx->bufcnt % bs;
rctx 834 drivers/crypto/omap-sham.c scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, req->src,
rctx 836 drivers/crypto/omap-sham.c rctx->bufcnt += len;
rctx 838 drivers/crypto/omap-sham.c rctx->offset = len;
rctx 841 drivers/crypto/omap-sham.c if (rctx->bufcnt)
rctx 842 drivers/crypto/omap-sham.c memcpy(rctx->dd->xmit_buf, rctx->buffer, rctx->bufcnt);
rctx 844 drivers/crypto/omap-sham.c ret = omap_sham_align_sgs(req->src, nbytes, bs, final, rctx);
rctx 848 drivers/crypto/omap-sham.c xmit_len = rctx->total;
rctx 859 drivers/crypto/omap-sham.c hash_later = rctx->total - xmit_len;
rctx 863 drivers/crypto/omap-sham.c if (rctx->bufcnt && nbytes) {
rctx 865 drivers/crypto/omap-sham.c sg_init_table(rctx->sgl, 2);
rctx 866 drivers/crypto/omap-sham.c sg_set_buf(rctx->sgl, rctx->dd->xmit_buf, rctx->bufcnt);
rctx 868 drivers/crypto/omap-sham.c sg_chain(rctx->sgl, 2, req->src);
rctx 870 drivers/crypto/omap-sham.c rctx->sg = rctx->sgl;
rctx 872 drivers/crypto/omap-sham.c rctx->sg_len++;
rctx 873 drivers/crypto/omap-sham.c } else if (rctx->bufcnt) {
rctx 875 drivers/crypto/omap-sham.c sg_init_table(rctx->sgl, 1);
rctx 876 drivers/crypto/omap-sham.c sg_set_buf(rctx->sgl, rctx->dd->xmit_buf, xmit_len);
rctx 878 drivers/crypto/omap-sham.c rctx->sg = rctx->sgl;
rctx 880 drivers/crypto/omap-sham.c rctx->sg_len = 1;
rctx 887 drivers/crypto/omap-sham.c memcpy(rctx->buffer, rctx->buffer + xmit_len,
rctx 893 drivers/crypto/omap-sham.c scatterwalk_map_and_copy(rctx->buffer + offset,
rctx 899 drivers/crypto/omap-sham.c rctx->bufcnt = hash_later;
rctx 901 drivers/crypto/omap-sham.c rctx->bufcnt = 0;
rctx 905 drivers/crypto/omap-sham.c rctx->total = xmit_len;
rctx 1433 drivers/crypto/omap-sham.c struct omap_sham_reqctx *rctx = ahash_request_ctx(req);
rctx 1435 drivers/crypto/omap-sham.c memcpy(out, rctx, sizeof(*rctx) + rctx->bufcnt);
rctx 1442 drivers/crypto/omap-sham.c struct omap_sham_reqctx *rctx = ahash_request_ctx(req);
rctx 1445 drivers/crypto/omap-sham.c memcpy(rctx, in, sizeof(*rctx) + ctx_in->bufcnt);
rctx 21 drivers/crypto/qce/ablkcipher.c struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 39 drivers/crypto/qce/ablkcipher.c dma_unmap_sg(qce->dev, rctx->src_sg, rctx->src_nents, dir_src);
rctx 40 drivers/crypto/qce/ablkcipher.c dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
rctx 42 drivers/crypto/qce/ablkcipher.c sg_free_table(&rctx->dst_tbl);
rctx 55 drivers/crypto/qce/ablkcipher.c struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 65 drivers/crypto/qce/ablkcipher.c rctx->iv = req->info;
rctx 66 drivers/crypto/qce/ablkcipher.c rctx->ivsize = crypto_ablkcipher_ivsize(ablkcipher);
rctx 67 drivers/crypto/qce/ablkcipher.c rctx->cryptlen = req->nbytes;
rctx 73 drivers/crypto/qce/ablkcipher.c rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
rctx 75 drivers/crypto/qce/ablkcipher.c rctx->dst_nents = sg_nents_for_len(req->dst, req->nbytes);
rctx 77 drivers/crypto/qce/ablkcipher.c rctx->dst_nents = rctx->src_nents;
rctx 78 drivers/crypto/qce/ablkcipher.c if (rctx->src_nents < 0) {
rctx 80 drivers/crypto/qce/ablkcipher.c return rctx->src_nents;
rctx 82 drivers/crypto/qce/ablkcipher.c if (rctx->dst_nents < 0) {
rctx 84 drivers/crypto/qce/ablkcipher.c return -rctx->dst_nents;
rctx 87 drivers/crypto/qce/ablkcipher.c rctx->dst_nents += 1;
rctx 92 drivers/crypto/qce/ablkcipher.c ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
rctx 96 drivers/crypto/qce/ablkcipher.c sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
rctx 98 drivers/crypto/qce/ablkcipher.c sg = qce_sgtable_add(&rctx->dst_tbl, req->dst);
rctx 104 drivers/crypto/qce/ablkcipher.c sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg);
rctx 111 drivers/crypto/qce/ablkcipher.c rctx->dst_sg = rctx->dst_tbl.sgl;
rctx 113 drivers/crypto/qce/ablkcipher.c ret = dma_map_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
rctx 118 drivers/crypto/qce/ablkcipher.c ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, dir_src);
rctx 121 drivers/crypto/qce/ablkcipher.c rctx->src_sg = req->src;
rctx 123 drivers/crypto/qce/ablkcipher.c rctx->src_sg = rctx->dst_sg;
rctx 126 drivers/crypto/qce/ablkcipher.c ret = qce_dma_prep_sgs(&qce->dma, rctx->src_sg, rctx->src_nents,
rctx 127 drivers/crypto/qce/ablkcipher.c rctx->dst_sg, rctx->dst_nents,
rctx 144 drivers/crypto/qce/ablkcipher.c dma_unmap_sg(qce->dev, req->src, rctx->src_nents, dir_src);
rctx 146 drivers/crypto/qce/ablkcipher.c dma_unmap_sg(qce->dev, rctx->dst_sg, rctx->dst_nents, dir_dst);
rctx 148 drivers/crypto/qce/ablkcipher.c sg_free_table(&rctx->dst_tbl);
rctx 215 drivers/crypto/qce/ablkcipher.c struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 219 drivers/crypto/qce/ablkcipher.c rctx->flags = tmpl->alg_flags;
rctx 220 drivers/crypto/qce/ablkcipher.c rctx->flags |= encrypt ? QCE_ENCRYPT : QCE_DECRYPT;
rctx 222 drivers/crypto/qce/ablkcipher.c if (IS_AES(rctx->flags) && ctx->enc_keylen != AES_KEYSIZE_128 &&
rctx 227 drivers/crypto/qce/common.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 238 drivers/crypto/qce/common.c if (!rctx->last_blk && req->nbytes % blocksize)
rctx 243 drivers/crypto/qce/common.c if (IS_CMAC(rctx->flags)) {
rctx 251 drivers/crypto/qce/common.c auth_cfg = qce_auth_cfg(rctx->flags, rctx->authklen);
rctx 254 drivers/crypto/qce/common.c if (IS_SHA_HMAC(rctx->flags) || IS_CMAC(rctx->flags)) {
rctx 255 drivers/crypto/qce/common.c u32 authkey_words = rctx->authklen / sizeof(u32);
rctx 257 drivers/crypto/qce/common.c qce_cpu_to_be32p_array(mackey, rctx->authkey, rctx->authklen);
rctx 262 drivers/crypto/qce/common.c if (IS_CMAC(rctx->flags))
rctx 265 drivers/crypto/qce/common.c if (rctx->first_blk)
rctx 266 drivers/crypto/qce/common.c memcpy(auth, rctx->digest, digestsize);
rctx 268 drivers/crypto/qce/common.c qce_cpu_to_be32p_array(auth, rctx->digest, digestsize);
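The qce destination setup above has one notable wrinkle: the driver appends its own DMA result buffer after the caller's destination entries (hence the `dst_nents += 1` at ablkcipher.c:87), so engine status lands in the same scatter-gather job as the data. Condensed from ablkcipher.c:87-111, comments added here:

    rctx->dst_nents += 1;	/* reserve a slot for the result entry */
    ret = sg_alloc_table(&rctx->dst_tbl, rctx->dst_nents, gfp);
    sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
    sg = qce_sgtable_add(&rctx->dst_tbl, req->dst);		/* caller's buffers */
    sg = qce_sgtable_add(&rctx->dst_tbl, &rctx->result_sg);	/* + status page */
    rctx->dst_sg = rctx->dst_tbl.sgl;	/* this combined list is DMA-mapped */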
rctx 270 drivers/crypto/qce/common.c iv_words = (IS_SHA1(rctx->flags) || IS_SHA1_HMAC(rctx->flags)) ? 5 : 8;
rctx 273 drivers/crypto/qce/common.c if (rctx->first_blk)
rctx 277 drivers/crypto/qce/common.c (u32 *)rctx->byte_count, 2);
rctx 279 drivers/crypto/qce/common.c auth_cfg = qce_auth_cfg(rctx->flags, 0);
rctx 281 drivers/crypto/qce/common.c if (rctx->last_blk)
rctx 286 drivers/crypto/qce/common.c if (rctx->first_blk)
rctx 311 drivers/crypto/qce/common.c struct qce_cipher_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 320 drivers/crypto/qce/common.c unsigned int ivsize = rctx->ivsize;
rctx 321 drivers/crypto/qce/common.c unsigned long flags = rctx->flags;
rctx 346 drivers/crypto/qce/common.c rctx->cryptlen);
rctx 356 drivers/crypto/qce/common.c qce_xts_swapiv(enciv, rctx->iv, ivsize);
rctx 358 drivers/crypto/qce/common.c qce_cpu_to_be32p_array(enciv, rctx->iv, ivsize);
rctx 367 drivers/crypto/qce/common.c qce_write(qce, REG_ENCR_SEG_SIZE, rctx->cryptlen);
rctx 34 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 46 drivers/crypto/qce/sha.c dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
rctx 47 drivers/crypto/qce/sha.c dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
rctx 49 drivers/crypto/qce/sha.c memcpy(rctx->digest, result->auth_iv, digestsize);
rctx 53 drivers/crypto/qce/sha.c rctx->byte_count[0] = cpu_to_be32(result->auth_byte_count[0]);
rctx 54 drivers/crypto/qce/sha.c rctx->byte_count[1] = cpu_to_be32(result->auth_byte_count[1]);
rctx 60 drivers/crypto/qce/sha.c req->src = rctx->src_orig;
rctx 61 drivers/crypto/qce/sha.c req->nbytes = rctx->nbytes_orig;
rctx 62 drivers/crypto/qce/sha.c rctx->last_blk = false;
rctx 63 drivers/crypto/qce/sha.c rctx->first_blk = false;
rctx 71 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 75 drivers/crypto/qce/sha.c unsigned long flags = rctx->flags;
rctx 79 drivers/crypto/qce/sha.c rctx->authkey = ctx->authkey;
rctx 80 drivers/crypto/qce/sha.c rctx->authklen = QCE_SHA_HMAC_KEY_SIZE;
rctx 82 drivers/crypto/qce/sha.c rctx->authkey = ctx->authkey;
rctx 83 drivers/crypto/qce/sha.c rctx->authklen = AES_KEYSIZE_128;
rctx 86 drivers/crypto/qce/sha.c rctx->src_nents = sg_nents_for_len(req->src, req->nbytes);
rctx 87 drivers/crypto/qce/sha.c if (rctx->src_nents < 0) {
rctx 89 drivers/crypto/qce/sha.c return rctx->src_nents;
rctx 92 drivers/crypto/qce/sha.c ret = dma_map_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
rctx 96 drivers/crypto/qce/sha.c sg_init_one(&rctx->result_sg, qce->dma.result_buf, QCE_RESULT_BUF_SZ);
rctx 98 drivers/crypto/qce/sha.c ret = dma_map_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
rctx 102 drivers/crypto/qce/sha.c ret = qce_dma_prep_sgs(&qce->dma, req->src, rctx->src_nents,
rctx 103 drivers/crypto/qce/sha.c &rctx->result_sg, 1, qce_ahash_done, async_req);
rctx 118 drivers/crypto/qce/sha.c dma_unmap_sg(qce->dev, &rctx->result_sg, 1, DMA_FROM_DEVICE);
rctx 120 drivers/crypto/qce/sha.c dma_unmap_sg(qce->dev, req->src, rctx->src_nents, DMA_TO_DEVICE);
rctx 126 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 130 drivers/crypto/qce/sha.c memset(rctx, 0, sizeof(*rctx));
rctx 131 drivers/crypto/qce/sha.c rctx->first_blk = true;
rctx 132 drivers/crypto/qce/sha.c rctx->last_blk = false;
rctx 133 drivers/crypto/qce/sha.c rctx->flags = tmpl->alg_flags;
rctx 134 drivers/crypto/qce/sha.c memcpy(rctx->digest, std_iv, sizeof(rctx->digest));
rctx 142 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 151 drivers/crypto/qce/sha.c out_state->count = rctx->count;
rctx 153 drivers/crypto/qce/sha.c rctx->digest, digestsize);
rctx 154 drivers/crypto/qce/sha.c memcpy(out_state->buffer, rctx->buf, blocksize);
rctx 158 drivers/crypto/qce/sha.c out_state->count = rctx->count;
rctx 160 drivers/crypto/qce/sha.c rctx->digest, digestsize);
rctx 161 drivers/crypto/qce/sha.c memcpy(out_state->buf, rctx->buf, blocksize);
rctx 173 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 179 drivers/crypto/qce/sha.c rctx->count = in_count;
rctx 180 drivers/crypto/qce/sha.c memcpy(rctx->buf, buffer, blocksize);
rctx 183 drivers/crypto/qce/sha.c rctx->first_blk = 1;
rctx 185 drivers/crypto/qce/sha.c rctx->first_blk = 0;
rctx 195 drivers/crypto/qce/sha.c rctx->byte_count[0] = (__force __be32)(count & ~SHA_PADDING_MASK);
rctx 196 drivers/crypto/qce/sha.c rctx->byte_count[1] = (__force __be32)(count >> 32);
rctx 197 drivers/crypto/qce/sha.c qce_cpu_to_be32p_array((__be32 *)rctx->digest, (const u8 *)state,
rctx 199 drivers/crypto/qce/sha.c rctx->buflen = (unsigned int)(in_count & (blocksize - 1));
rctx 206 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 207 drivers/crypto/qce/sha.c unsigned long flags = rctx->flags;
rctx 229 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 239 drivers/crypto/qce/sha.c rctx->count += req->nbytes;
rctx 242 drivers/crypto/qce/sha.c total = req->nbytes + rctx->buflen;
rctx 245 drivers/crypto/qce/sha.c scatterwalk_map_and_copy(rctx->buf + rctx->buflen, req->src,
rctx 247 drivers/crypto/qce/sha.c rctx->buflen += req->nbytes;
rctx 252 drivers/crypto/qce/sha.c rctx->src_orig = req->src;
rctx 253 drivers/crypto/qce/sha.c rctx->nbytes_orig = req->nbytes;
rctx 259 drivers/crypto/qce/sha.c if (rctx->buflen)
rctx 260 drivers/crypto/qce/sha.c memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);
rctx 266 drivers/crypto/qce/sha.c scatterwalk_map_and_copy(rctx->buf, req->src, src_offset,
rctx 273 drivers/crypto/qce/sha.c len = rctx->buflen;
rctx 289 drivers/crypto/qce/sha.c if (rctx->buflen) {
rctx 290 drivers/crypto/qce/sha.c sg_init_table(rctx->sg, 2);
rctx 291 drivers/crypto/qce/sha.c sg_set_buf(rctx->sg, rctx->tmpbuf, rctx->buflen);
rctx 292 drivers/crypto/qce/sha.c sg_chain(rctx->sg, 2, req->src);
rctx 293 drivers/crypto/qce/sha.c req->src = rctx->sg;
rctx 297 drivers/crypto/qce/sha.c rctx->buflen = hash_later;
rctx 304 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 308 drivers/crypto/qce/sha.c if (!rctx->buflen)
rctx 311 drivers/crypto/qce/sha.c rctx->last_blk = true;
rctx 313 drivers/crypto/qce/sha.c rctx->src_orig = req->src;
rctx 314 drivers/crypto/qce/sha.c rctx->nbytes_orig = req->nbytes;
rctx 316 drivers/crypto/qce/sha.c memcpy(rctx->tmpbuf, rctx->buf, rctx->buflen);
rctx 317 drivers/crypto/qce/sha.c sg_init_one(rctx->sg, rctx->tmpbuf, rctx->buflen);
rctx 319 drivers/crypto/qce/sha.c req->src = rctx->sg;
rctx 320 drivers/crypto/qce/sha.c req->nbytes = rctx->buflen;
rctx 327 drivers/crypto/qce/sha.c struct qce_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 336 drivers/crypto/qce/sha.c rctx->src_orig = req->src;
rctx 337 drivers/crypto/qce/sha.c rctx->nbytes_orig = req->nbytes;
rctx 338 drivers/crypto/qce/sha.c rctx->first_blk = true;
rctx 339 drivers/crypto/qce/sha.c rctx->last_blk = true;
rctx 49 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 69 drivers/crypto/rockchip/rk3288_crypto_ahash.c CRYPTO_WRITE(dev, RK_CRYPTO_HASH_CTRL, rctx->mode |
rctx 81 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 85 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 86 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 89 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_init(&rctx->fallback_req);
rctx 94 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 98 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 99 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 101 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.nbytes = req->nbytes;
rctx 102 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.src = req->src;
rctx 104 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_update(&rctx->fallback_req);
rctx 109 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 113 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 114 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 116 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.result = req->result;
rctx 118 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_final(&rctx->fallback_req);
rctx 123 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 127 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 128 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 131 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.nbytes = req->nbytes;
rctx 132 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.src = req->src;
rctx 133 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.result = req->result;
rctx 135 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_finup(&rctx->fallback_req);
rctx 140 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 144 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 145 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 148 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_import(&rctx->fallback_req, in);
rctx 153 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx = ahash_request_ctx(req);
rctx 157 drivers/crypto/rockchip/rk3288_crypto_ahash.c ahash_request_set_tfm(&rctx->fallback_req, ctx->fallback_tfm);
rctx 158 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->fallback_req.base.flags = req->base.flags &
rctx 161 drivers/crypto/rockchip/rk3288_crypto_ahash.c return crypto_ahash_export(&rctx->fallback_req, out);
rctx 197 drivers/crypto/rockchip/rk3288_crypto_ahash.c struct rk_ahash_rctx *rctx;
rctx 207 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx = ahash_request_ctx(req);
rctx 208 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->mode = 0;
rctx 213 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->mode = RK_CRYPTO_HASH_SHA1;
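
The rk3288 block above is the classic fallback-tfm shape: the hardware cannot resume from a partial state, so init/update/final/finup/import/export all copy the caller's request parameters into a software ahash request embedded in rk_ahash_rctx and forward the call. A condensed sketch of one forwarded op, assuming a hypothetical fallback_rctx layout:

        #include <crypto/internal/hash.h>

        struct fallback_rctx {
                /*
                 * Must be the last member: the fallback's own request ctx
                 * lives directly behind it, so the tfm init has to reserve
                 * sizeof(struct fallback_rctx) + crypto_ahash_reqsize(fallback).
                 */
                struct ahash_request fallback_req;
        };

        static int fallback_update(struct ahash_request *req,
                                   struct crypto_ahash *fallback_tfm)
        {
                struct fallback_rctx *rctx = ahash_request_ctx(req);

                ahash_request_set_tfm(&rctx->fallback_req, fallback_tfm);
                /* keep only the caller's MAY_SLEEP hint, drop everything else */
                rctx->fallback_req.base.flags = req->base.flags &
                                                CRYPTO_TFM_REQ_MAY_SLEEP;
                rctx->fallback_req.nbytes = req->nbytes;
                rctx->fallback_req.src = req->src;

                return crypto_ahash_update(&rctx->fallback_req);
        }
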
rctx 216 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->mode = RK_CRYPTO_HASH_SHA256;
rctx 219 drivers/crypto/rockchip/rk3288_crypto_ahash.c rctx->mode = RK_CRYPTO_HASH_MD5;
rctx 554 drivers/crypto/sahara.c struct sahara_aes_reqctx *rctx;
rctx 568 drivers/crypto/sahara.c rctx = ablkcipher_request_ctx(req);
rctx 570 drivers/crypto/sahara.c rctx->mode &= FLAGS_MODE_MASK;
rctx 571 drivers/crypto/sahara.c dev->flags = (dev->flags & ~FLAGS_MODE_MASK) | rctx->mode;
rctx 635 drivers/crypto/sahara.c struct sahara_aes_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 648 drivers/crypto/sahara.c rctx->mode = mode;
rctx 772 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx)
rctx 776 drivers/crypto/sahara.c hdr = rctx->mode;
rctx 778 drivers/crypto/sahara.c if (rctx->first) {
rctx 785 drivers/crypto/sahara.c if (rctx->last)
rctx 795 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx,
rctx 802 drivers/crypto/sahara.c dev->in_sg = rctx->in_sg;
rctx 804 drivers/crypto/sahara.c dev->nb_in_sg = sg_nents_for_len(dev->in_sg, rctx->total);
rctx 835 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx,
rctx 842 drivers/crypto/sahara.c if (rctx->first)
rctx 844 drivers/crypto/sahara.c dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);
rctx 849 drivers/crypto/sahara.c dev->hw_desc[index]->len1 = rctx->total;
rctx 853 drivers/crypto/sahara.c rctx->sg_in_idx = 0;
rctx 857 drivers/crypto/sahara.c i = sahara_sha_hw_links_create(dev, rctx, index);
rctx 859 drivers/crypto/sahara.c rctx->sg_in_idx = index;
rctx 867 drivers/crypto/sahara.c result_len = rctx->context_size;
rctx 888 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx,
rctx 892 drivers/crypto/sahara.c dev->hw_desc[index]->hdr = sahara_sha_init_hdr(dev, rctx);
rctx 894 drivers/crypto/sahara.c dev->hw_desc[index]->len1 = rctx->context_size;
rctx 899 drivers/crypto/sahara.c dev->hw_link[index]->len = rctx->context_size;
rctx 927 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 935 drivers/crypto/sahara.c len = rctx->buf_cnt + req->nbytes;
rctx 938 drivers/crypto/sahara.c if (!rctx->last && (len < block_size)) {
rctx 940 drivers/crypto/sahara.c scatterwalk_map_and_copy(rctx->buf + rctx->buf_cnt, req->src,
rctx 942 drivers/crypto/sahara.c rctx->buf_cnt += req->nbytes;
rctx 948 drivers/crypto/sahara.c if (rctx->buf_cnt)
rctx 949 drivers/crypto/sahara.c memcpy(rctx->rembuf, rctx->buf, rctx->buf_cnt);
rctx 952 drivers/crypto/sahara.c hash_later = rctx->last ? 0 : len & (block_size - 1);
rctx 956 drivers/crypto/sahara.c scatterwalk_map_and_copy(rctx->buf, req->src, offset,
rctx 966 drivers/crypto/sahara.c if (rctx->buf_cnt && req->nbytes) {
rctx 967 drivers/crypto/sahara.c sg_init_table(rctx->in_sg_chain, 2);
rctx 968 drivers/crypto/sahara.c sg_set_buf(rctx->in_sg_chain, rctx->rembuf, rctx->buf_cnt);
rctx 970 drivers/crypto/sahara.c sg_chain(rctx->in_sg_chain, 2, req->src);
rctx 972 drivers/crypto/sahara.c rctx->total = req->nbytes + rctx->buf_cnt;
rctx 973 drivers/crypto/sahara.c rctx->in_sg = rctx->in_sg_chain;
rctx 975 drivers/crypto/sahara.c req->src = rctx->in_sg_chain;
rctx 977 drivers/crypto/sahara.c } else if (rctx->buf_cnt) {
rctx 979 drivers/crypto/sahara.c rctx->in_sg = req->src;
rctx 981 drivers/crypto/sahara.c rctx->in_sg = rctx->in_sg_chain;
rctx 983 drivers/crypto/sahara.c sg_init_one(rctx->in_sg, rctx->rembuf, rctx->buf_cnt);
rctx 984 drivers/crypto/sahara.c rctx->total = rctx->buf_cnt;
rctx 987 drivers/crypto/sahara.c rctx->in_sg = req->src;
rctx 988 drivers/crypto/sahara.c rctx->total = req->nbytes;
rctx 989 drivers/crypto/sahara.c req->src = rctx->in_sg;
rctx 993 drivers/crypto/sahara.c rctx->buf_cnt = hash_later;
rctx 1001 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 1009 drivers/crypto/sahara.c if (rctx->first) {
rctx 1010 drivers/crypto/sahara.c sahara_sha_hw_data_descriptor_create(dev, rctx, req, 0);
rctx 1012 drivers/crypto/sahara.c rctx->first = 0;
rctx 1014 drivers/crypto/sahara.c memcpy(dev->context_base, rctx->context, rctx->context_size);
rctx 1016 drivers/crypto/sahara.c sahara_sha_hw_context_descriptor_create(dev, rctx, req, 0);
rctx 1018 drivers/crypto/sahara.c sahara_sha_hw_data_descriptor_create(dev, rctx, req, 1);
rctx 1036 drivers/crypto/sahara.c if (rctx->sg_in_idx)
rctx 1040 drivers/crypto/sahara.c memcpy(rctx->context, dev->context_base, rctx->context_size);
rctx 1043 drivers/crypto/sahara.c memcpy(req->result, rctx->context, rctx->digest_size);
rctx 1093 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 1100 drivers/crypto/sahara.c rctx->last = last;
rctx 1102 drivers/crypto/sahara.c if (!rctx->active) {
rctx 1103 drivers/crypto/sahara.c rctx->active = 1;
rctx 1104 drivers/crypto/sahara.c rctx->first = 1;
rctx 1119 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 1121 drivers/crypto/sahara.c memset(rctx, 0, sizeof(*rctx));
rctx 1125 drivers/crypto/sahara.c rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA1;
rctx 1126 drivers/crypto/sahara.c rctx->digest_size = SHA1_DIGEST_SIZE;
rctx 1129 drivers/crypto/sahara.c rctx->mode |= SAHARA_HDR_MDHA_ALG_SHA256;
rctx 1130 drivers/crypto/sahara.c rctx->digest_size = SHA256_DIGEST_SIZE;
rctx 1136 drivers/crypto/sahara.c rctx->context_size = rctx->digest_size + 4;
rctx 1137 drivers/crypto/sahara.c rctx->active = 0;
rctx 1167 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 1169 drivers/crypto/sahara.c memcpy(out, rctx, sizeof(struct sahara_sha_reqctx));
rctx 1176 drivers/crypto/sahara.c struct sahara_sha_reqctx *rctx = ahash_request_ctx(req);
rctx 1178 drivers/crypto/sahara.c memcpy(rctx, in, sizeof(struct sahara_sha_reqctx));
rctx 721 drivers/crypto/stm32/stm32-cryp.c struct stm32_cryp_reqctx *rctx = ablkcipher_request_ctx(req);
rctx 727 drivers/crypto/stm32/stm32-cryp.c rctx->mode = mode;
rctx 735 drivers/crypto/stm32/stm32-cryp.c struct stm32_cryp_reqctx *rctx = aead_request_ctx(req);
rctx 741 drivers/crypto/stm32/stm32-cryp.c rctx->mode = mode;
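
qce (sha.c:289-297) and sahara (sahara.c:966-993) carry sub-block residue the same way: leftover bytes are copied into a temporary buffer, and on the next pass a two-entry scatterlist is chained in front of the caller's req->src; hash_later = last ? 0 : len & (block_size - 1) decides how much to hold back for the following update. A sketch of just the chaining step, with illustrative field names:

        #include <linux/scatterlist.h>
        #include <linux/types.h>

        struct buffered_hash_rctx {
                struct scatterlist sg[2];       /* bounce entry + chain link */
                u8 buf[64];                     /* leftover bytes, at most a block */
                unsigned int buflen;
        };

        static struct scatterlist *prepend_leftover(struct buffered_hash_rctx *rctx,
                                                    struct scatterlist *src)
        {
                if (!rctx->buflen)
                        return src;             /* nothing buffered, use src as-is */

                sg_init_table(rctx->sg, 2);
                sg_set_buf(rctx->sg, rctx->buf, rctx->buflen);
                sg_chain(rctx->sg, 2, src);     /* second entry links to src */
                return rctx->sg;
        }

The chained list is what actually gets handed to the DMA or hashing path, which is why both drivers also save src_orig/nbytes_orig so the caller's request can be restored on completion.
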
rctx 916 drivers/crypto/stm32/stm32-cryp.c struct stm32_cryp_reqctx *rctx;
rctx 930 drivers/crypto/stm32/stm32-cryp.c rctx = req ? ablkcipher_request_ctx(req) : aead_request_ctx(areq);
rctx 931 drivers/crypto/stm32/stm32-cryp.c rctx->mode &= FLG_MODE_MASK;
rctx 935 drivers/crypto/stm32/stm32-cryp.c cryp->flags = (cryp->flags & ~FLG_MODE_MASK) | rctx->mode;
rctx 252 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
rctx 259 drivers/crypto/stm32/stm32-hash.c switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
rctx 276 drivers/crypto/stm32/stm32-hash.c reg |= (rctx->data_type << HASH_CR_DATATYPE_POS);
rctx 278 drivers/crypto/stm32/stm32-hash.c if (rctx->flags & HASH_FLAGS_HMAC) {
rctx 295 drivers/crypto/stm32/stm32-hash.c static void stm32_hash_append_sg(struct stm32_hash_request_ctx *rctx)
rctx 299 drivers/crypto/stm32/stm32-hash.c while ((rctx->bufcnt < rctx->buflen) && rctx->total) {
rctx 300 drivers/crypto/stm32/stm32-hash.c count = min(rctx->sg->length - rctx->offset, rctx->total);
rctx 301 drivers/crypto/stm32/stm32-hash.c count = min(count, rctx->buflen - rctx->bufcnt);
rctx 304 drivers/crypto/stm32/stm32-hash.c if ((rctx->sg->length == 0) && !sg_is_last(rctx->sg)) {
rctx 305 drivers/crypto/stm32/stm32-hash.c rctx->sg = sg_next(rctx->sg);
rctx 312 drivers/crypto/stm32/stm32-hash.c scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt, rctx->sg,
rctx 313 drivers/crypto/stm32/stm32-hash.c rctx->offset, count, 0);
rctx 315 drivers/crypto/stm32/stm32-hash.c rctx->bufcnt += count;
rctx 316 drivers/crypto/stm32/stm32-hash.c rctx->offset += count;
rctx 317 drivers/crypto/stm32/stm32-hash.c rctx->total -= count;
rctx 319 drivers/crypto/stm32/stm32-hash.c if (rctx->offset == rctx->sg->length) {
rctx 320 drivers/crypto/stm32/stm32-hash.c rctx->sg = sg_next(rctx->sg);
rctx 321 drivers/crypto/stm32/stm32-hash.c if (rctx->sg)
rctx 322 drivers/crypto/stm32/stm32-hash.c rctx->offset = 0;
rctx 324 drivers/crypto/stm32/stm32-hash.c rctx->total = 0;
rctx 380 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
rctx 383 drivers/crypto/stm32/stm32-hash.c dev_dbg(hdev->dev, "%s flags %lx\n", __func__, rctx->flags);
rctx 385 drivers/crypto/stm32/stm32-hash.c final = (rctx->flags & HASH_FLAGS_FINUP);
rctx 387 drivers/crypto/stm32/stm32-hash.c while ((rctx->total >= rctx->buflen) ||
rctx 388 drivers/crypto/stm32/stm32-hash.c (rctx->bufcnt + rctx->total >= rctx->buflen)) {
rctx 389 drivers/crypto/stm32/stm32-hash.c stm32_hash_append_sg(rctx);
rctx 390 drivers/crypto/stm32/stm32-hash.c bufcnt = rctx->bufcnt;
rctx 391 drivers/crypto/stm32/stm32-hash.c rctx->bufcnt = 0;
rctx 392 drivers/crypto/stm32/stm32-hash.c err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt, 0);
rctx 395 drivers/crypto/stm32/stm32-hash.c stm32_hash_append_sg(rctx);
rctx 398 drivers/crypto/stm32/stm32-hash.c bufcnt = rctx->bufcnt;
rctx 399 drivers/crypto/stm32/stm32-hash.c rctx->bufcnt = 0;
rctx 400 drivers/crypto/stm32/stm32-hash.c err = stm32_hash_xmit_cpu(hdev, rctx->buffer, bufcnt,
rctx 401 drivers/crypto/stm32/stm32-hash.c (rctx->flags & HASH_FLAGS_FINUP));
rctx 478 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
rctx 489 drivers/crypto/stm32/stm32-hash.c sg_init_one(&rctx->sg_key, ctx->key,
rctx 492 drivers/crypto/stm32/stm32-hash.c rctx->dma_ct = dma_map_sg(hdev->dev, &rctx->sg_key, 1,
rctx 494 drivers/crypto/stm32/stm32-hash.c if (rctx->dma_ct == 0) {
rctx 499 drivers/crypto/stm32/stm32-hash.c err = stm32_hash_xmit_dma(hdev, &rctx->sg_key, ctx->keylen, 0);
rctx 501 drivers/crypto/stm32/stm32-hash.c dma_unmap_sg(hdev->dev, &rctx->sg_key, 1, DMA_TO_DEVICE);
rctx 542 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(hdev->req);
rctx 546 drivers/crypto/stm32/stm32-hash.c u32 *buffer = (void *)rctx->buffer;
rctx 548 drivers/crypto/stm32/stm32-hash.c rctx->sg = hdev->req->src;
rctx 549 drivers/crypto/stm32/stm32-hash.c rctx->total = hdev->req->nbytes;
rctx 551 drivers/crypto/stm32/stm32-hash.c rctx->nents = sg_nents(rctx->sg);
rctx 553 drivers/crypto/stm32/stm32-hash.c if (rctx->nents < 0)
rctx 564 drivers/crypto/stm32/stm32-hash.c for_each_sg(rctx->sg, tsg, rctx->nents, i) {
rctx 573 drivers/crypto/stm32/stm32-hash.c rctx->sg, rctx->nents,
rctx 574 drivers/crypto/stm32/stm32-hash.c rctx->buffer, sg->length - len,
rctx 575 drivers/crypto/stm32/stm32-hash.c rctx->total - sg->length + len);
rctx 587 drivers/crypto/stm32/stm32-hash.c rctx->dma_ct = dma_map_sg(hdev->dev, sg, 1,
rctx 589 drivers/crypto/stm32/stm32-hash.c if (rctx->dma_ct == 0) {
rctx 683 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 686 drivers/crypto/stm32/stm32-hash.c rctx->hdev = hdev;
rctx 688 drivers/crypto/stm32/stm32-hash.c rctx->flags = HASH_FLAGS_CPU;
rctx 690 drivers/crypto/stm32/stm32-hash.c rctx->digcnt = crypto_ahash_digestsize(tfm);
rctx 691 drivers/crypto/stm32/stm32-hash.c switch (rctx->digcnt) {
rctx 693 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_MD5;
rctx 696 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_SHA1;
rctx 699 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_SHA224;
rctx 702 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_SHA256;
rctx 708 drivers/crypto/stm32/stm32-hash.c rctx->bufcnt = 0;
rctx 709 drivers/crypto/stm32/stm32-hash.c rctx->buflen = HASH_BUFLEN;
rctx 710 drivers/crypto/stm32/stm32-hash.c rctx->total = 0;
rctx 711 drivers/crypto/stm32/stm32-hash.c rctx->offset = 0;
rctx 712 drivers/crypto/stm32/stm32-hash.c rctx->data_type = HASH_DATA_8_BITS;
rctx 714 drivers/crypto/stm32/stm32-hash.c memset(rctx->buffer, 0, HASH_BUFLEN);
rctx 717 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_HMAC;
rctx 719 drivers/crypto/stm32/stm32-hash.c dev_dbg(hdev->dev, "%s Flags %lx\n", __func__, rctx->flags);
rctx 732 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 734 drivers/crypto/stm32/stm32-hash.c int buflen = rctx->bufcnt;
rctx 736 drivers/crypto/stm32/stm32-hash.c rctx->bufcnt = 0;
rctx 738 drivers/crypto/stm32/stm32-hash.c if (!(rctx->flags & HASH_FLAGS_CPU))
rctx 741 drivers/crypto/stm32/stm32-hash.c err = stm32_hash_xmit_cpu(hdev, rctx->buffer, buflen, 1);
rctx 749 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 750 drivers/crypto/stm32/stm32-hash.c u32 *hash = (u32 *)rctx->digest;
rctx 753 drivers/crypto/stm32/stm32-hash.c switch (rctx->flags & HASH_FLAGS_ALGO_MASK) {
rctx 771 drivers/crypto/stm32/stm32-hash.c hash[i] = be32_to_cpu(stm32_hash_read(rctx->hdev,
rctx 777 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 782 drivers/crypto/stm32/stm32-hash.c memcpy(req->result, rctx->digest, rctx->digcnt);
rctx 789 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 790 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_dev *hdev = rctx->hdev;
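
stm32_hash_append_sg() (stm32-hash.c:295-324) is the CPU-mode counterpart of the buffering above: it drains the request scatterlist into a linear staging buffer that is later fed to the hash FIFO. The reconstruction below follows the hits but drops the zero-length-entry skip at :304 for brevity; treat it as a sketch of the loop, not the driver source:

        #include <crypto/scatterwalk.h>
        #include <linux/scatterlist.h>
        #include <linux/kernel.h>

        struct staged_hash_rctx {
                struct scatterlist *sg;         /* current position in req->src */
                unsigned int offset;            /* offset within *sg */
                unsigned int total;             /* bytes left in the request */
                u8 buffer[256];                 /* linear staging buffer */
                unsigned int bufcnt, buflen;
        };

        static void append_sg(struct staged_hash_rctx *rctx)
        {
                unsigned int count;

                /* caller has set sg/total; stop when buffer full or data gone */
                while (rctx->bufcnt < rctx->buflen && rctx->total) {
                        count = min(rctx->sg->length - rctx->offset, rctx->total);
                        count = min(count, rctx->buflen - rctx->bufcnt);

                        /* map, copy out of the sg page, unmap */
                        scatterwalk_map_and_copy(rctx->buffer + rctx->bufcnt,
                                                 rctx->sg, rctx->offset, count, 0);

                        rctx->bufcnt += count;
                        rctx->offset += count;
                        rctx->total -= count;

                        if (rctx->offset == rctx->sg->length) {
                                rctx->sg = sg_next(rctx->sg);
                                rctx->offset = 0;
                                if (!rctx->sg)
                                        rctx->total = 0;
                        }
                }
        }
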
rctx 801 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_ERRORS;
rctx 811 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx)
rctx 841 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx;
rctx 848 drivers/crypto/stm32/stm32-hash.c rctx = ahash_request_ctx(req);
rctx 851 drivers/crypto/stm32/stm32-hash.c rctx->op, req->nbytes);
rctx 853 drivers/crypto/stm32/stm32-hash.c return stm32_hash_hw_init(hdev, rctx);
rctx 862 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx;
rctx 870 drivers/crypto/stm32/stm32-hash.c rctx = ahash_request_ctx(req);
rctx 872 drivers/crypto/stm32/stm32-hash.c if (rctx->op == HASH_OP_UPDATE)
rctx 874 drivers/crypto/stm32/stm32-hash.c else if (rctx->op == HASH_OP_FINAL)
rctx 886 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 890 drivers/crypto/stm32/stm32-hash.c rctx->op = op;
rctx 897 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 899 drivers/crypto/stm32/stm32-hash.c if (!req->nbytes || !(rctx->flags & HASH_FLAGS_CPU))
rctx 902 drivers/crypto/stm32/stm32-hash.c rctx->total = req->nbytes;
rctx 903 drivers/crypto/stm32/stm32-hash.c rctx->sg = req->src;
rctx 904 drivers/crypto/stm32/stm32-hash.c rctx->offset = 0;
rctx 906 drivers/crypto/stm32/stm32-hash.c if ((rctx->bufcnt + rctx->total < rctx->buflen)) {
rctx 907 drivers/crypto/stm32/stm32-hash.c stm32_hash_append_sg(rctx);
rctx 916 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 918 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_FINUP;
rctx 925 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 930 drivers/crypto/stm32/stm32-hash.c rctx->flags |= HASH_FLAGS_FINUP;
rctx 933 drivers/crypto/stm32/stm32-hash.c rctx->flags &= ~HASH_FLAGS_CPU;
rctx 956 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 967 drivers/crypto/stm32/stm32-hash.c rctx->hw_context = kmalloc_array(3 + HASH_CSR_REGISTER_NUMBER,
rctx 971 drivers/crypto/stm32/stm32-hash.c preg = rctx->hw_context;
rctx 982 drivers/crypto/stm32/stm32-hash.c memcpy(out, rctx, sizeof(*rctx));
rctx 989 drivers/crypto/stm32/stm32-hash.c struct stm32_hash_request_ctx *rctx = ahash_request_ctx(req);
rctx 996 drivers/crypto/stm32/stm32-hash.c memcpy(rctx, in, sizeof(*rctx));
rctx 998 drivers/crypto/stm32/stm32-hash.c preg = rctx->hw_context;
rctx 1014 drivers/crypto/stm32/stm32-hash.c kfree(rctx->hw_context);
rctx 351 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 353 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
rctx 362 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 364 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_AES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
rctx 374 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 376 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
rctx 385 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 387 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_AES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
rctx 397 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 399 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
rctx 408 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 410 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_DES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
rctx 420 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 422 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
rctx 431 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 433 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_DES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
rctx 443 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 445 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_ENCRYPTION |
rctx 454 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 456 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_3DES | SS_CBC | SS_ENABLED | SS_DECRYPTION |
rctx 466 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 468 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_ENCRYPTION |
rctx 477 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c struct sun4i_cipher_req_ctx *rctx = skcipher_request_ctx(areq);
rctx 479 drivers/crypto/sunxi-ss/sun4i-ss-cipher.c rctx->mode = SS_OP_3DES | SS_ECB | SS_ENABLED | SS_DECRYPTION |
rctx 494 drivers/infiniband/sw/siw/siw.h #define rx_wqe(rctx) (&(rctx)->wqe_active)
rctx 495 drivers/infiniband/sw/siw/siw.h #define rx_mem(rctx) ((rctx)->wqe_active.mem[0])
rctx 2733 drivers/scsi/megaraid/megaraid_sas_fusion.c struct RAID_CONTEXT *rctx;
rctx 2741 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx = &io_request->RaidContext.raid_context;
rctx 2744 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->virtual_disk_tgt_id = cpu_to_le16(device_id);
rctx 2745 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->status = 0;
rctx 2746 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->ex_status = 0;
rctx 2821 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->reg_lock_flags = 0;
rctx 2824 drivers/scsi/megaraid/megaraid_sas_fusion.c if (MR_BuildRaidContext(instance, &io_info, rctx,
rctx 2902 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->type = MPI2_TYPE_CUDA;
rctx 2903 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->nseg = 0x1;
rctx 2905 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->reg_lock_flags |=
rctx 2927 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->span_arm = io_info.span_arm;
rctx 2950 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->timeout_value =
rctx 2957 drivers/scsi/megaraid/megaraid_sas_fusion.c (rctx->reg_lock_flags == REGION_TYPE_UNUSED))
rctx 2961 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->type = MPI2_TYPE_CUDA;
rctx 2962 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->reg_lock_flags |=
rctx 2965 drivers/scsi/megaraid/megaraid_sas_fusion.c rctx->nseg = 0x1;
rctx 1262 include/linux/perf_event.h struct hlist_head *head, int rctx,
rctx 1331 include/linux/perf_event.h extern void perf_swevent_put_recursion_context(int rctx);
rctx 1411 include/linux/perf_event.h static inline void perf_swevent_put_recursion_context(int rctx) { }
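
sun4i-ss, sahara and stm32-cryp all encode the requested operation as flag bits in rctx->mode and let a single engine path fold those bits into the device state under a mask (see the sahara.c:570-571 and stm32-cryp.c:931-935 hits). A sketch of that convention; the flag values below are invented for illustration:

        #include <linux/bits.h>

        #define OP_ENCRYPT      BIT(0)
        #define OP_CBC          BIT(1)
        #define OP_AES          BIT(2)
        #define MODE_MASK       GENMASK(7, 0)

        struct mode_rctx {
                unsigned long mode;
        };

        /* entry point: only records what the caller asked for */
        static int aes_cbc_encrypt_prepare(struct mode_rctx *rctx)
        {
                rctx->mode = OP_AES | OP_CBC | OP_ENCRYPT;
                return 0;
        }

        /* engine: replace the mode bits, keep the rest of the device state */
        static void engine_take_request(unsigned long *dev_flags,
                                        struct mode_rctx *rctx)
        {
                rctx->mode &= MODE_MASK;
                *dev_flags = (*dev_flags & ~MODE_MASK) | rctx->mode;
        }

Keeping the op in the request context rather than the tfm context is what lets these drivers queue requests with different directions and chaining modes against the same engine.
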
rctx 632 include/linux/trace_events.h void perf_trace_run_bpf_submit(void *raw_data, int size, int rctx,
rctx 638 include/linux/trace_events.h perf_trace_buf_submit(void *raw_data, int size, int rctx, u16 type,
rctx 642 include/linux/trace_events.h perf_tp_event(type, count, raw_data, size, regs, head, rctx, task);
rctx 44 include/trace/perf.h int rctx; \
rctx 58 include/trace/perf.h entry = perf_trace_buf_alloc(__entry_size, &__regs, &rctx); \
rctx 68 include/trace/perf.h perf_trace_run_bpf_submit(entry, __entry_size, rctx, \
rctx 152 kernel/events/callchain.c static struct perf_callchain_entry *get_callchain_entry(int *rctx)
rctx 157 kernel/events/callchain.c *rctx = get_recursion_context(this_cpu_ptr(callchain_recursion));
rctx 158 kernel/events/callchain.c if (*rctx == -1)
rctx 168 kernel/events/callchain.c (*rctx * perf_callchain_entry__sizeof()));
rctx 172 kernel/events/callchain.c put_callchain_entry(int rctx)
rctx 174 kernel/events/callchain.c put_recursion_context(this_cpu_ptr(callchain_recursion), rctx);
rctx 183 kernel/events/callchain.c int rctx;
rctx 185 kernel/events/callchain.c entry = get_callchain_entry(&rctx);
rctx 186 kernel/events/callchain.c if (rctx == -1)
rctx 229 kernel/events/callchain.c put_callchain_entry(rctx);
rctx 6012 kernel/events/core.c int rctx;
rctx 6014 kernel/events/core.c rctx = perf_swevent_get_recursion_context();
rctx 6027 kernel/events/core.c if (rctx >= 0)
rctx 6028 kernel/events/core.c perf_swevent_put_recursion_context(rctx);
rctx 8444 kernel/events/core.c void perf_swevent_put_recursion_context(int rctx)
rctx 8448 kernel/events/core.c put_recursion_context(swhash->recursion, rctx);
rctx 8464 kernel/events/core.c int rctx;
rctx 8467 kernel/events/core.c rctx = perf_swevent_get_recursion_context();
rctx 8468 kernel/events/core.c if (unlikely(rctx < 0))
rctx 8473 kernel/events/core.c perf_swevent_put_recursion_context(rctx);
rctx 8705 kernel/events/core.c void perf_trace_run_bpf_submit(void *raw_data, int size, int rctx,
rctx 8713 kernel/events/core.c perf_swevent_put_recursion_context(rctx);
rctx 8718 kernel/events/core.c rctx, task);
rctx 8723 kernel/events/core.c struct pt_regs *regs, struct hlist_head *head, int rctx,
rctx 8773 kernel/events/core.c perf_swevent_put_recursion_context(rctx);
rctx 207 kernel/events/internal.h int rctx;
rctx 210 kernel/events/internal.h rctx = 3;
rctx 212 kernel/events/internal.h rctx = 2;
rctx 214 kernel/events/internal.h rctx = 1;
rctx 216 kernel/events/internal.h rctx = 0;
rctx 218 kernel/events/internal.h if (recursion[rctx])
rctx 221 kernel/events/internal.h recursion[rctx]++;
rctx 224 kernel/events/internal.h return rctx;
rctx 227 kernel/events/internal.h static inline void put_recursion_context(int *recursion, int rctx)
rctx 230 kernel/events/internal.h recursion[rctx]--;
rctx 393 kernel/trace/trace_event_perf.c int rctx;
rctx 401 kernel/trace/trace_event_perf.c *rctxp = rctx = perf_swevent_get_recursion_context();
rctx 402 kernel/trace/trace_event_perf.c if (rctx < 0)
rctx 406 kernel/trace/trace_event_perf.c *regs = this_cpu_ptr(&__perf_regs[rctx]);
rctx 407 kernel/trace/trace_event_perf.c raw_data = this_cpu_ptr(perf_trace_buf[rctx]);
rctx 436 kernel/trace/trace_event_perf.c int rctx;
rctx 459 kernel/trace/trace_event_perf.c entry = perf_trace_buf_alloc(ENTRY_SIZE, NULL, &rctx);
rctx 465 kernel/trace/trace_event_perf.c perf_trace_buf_submit(entry, ENTRY_SIZE, rctx, TRACE_FN,
rctx 1380 kernel/trace/trace_kprobe.c int rctx;
rctx 1408 kernel/trace/trace_kprobe.c entry = perf_trace_buf_alloc(size, NULL, &rctx);
rctx 1415 kernel/trace/trace_kprobe.c perf_trace_buf_submit(entry, size, rctx, call->event.type, 1, regs,
rctx 1430 kernel/trace/trace_kprobe.c int rctx;
rctx 1444 kernel/trace/trace_kprobe.c entry = perf_trace_buf_alloc(size, NULL, &rctx);
rctx 1451 kernel/trace/trace_kprobe.c perf_trace_buf_submit(entry, size, rctx, call->event.type, 1, regs,
rctx 591 kernel/trace/trace_syscalls.c int rctx;
rctx 614 kernel/trace/trace_syscalls.c rec = perf_trace_buf_alloc(size, NULL, &rctx);
rctx 625 kernel/trace/trace_syscalls.c perf_swevent_put_recursion_context(rctx);
rctx 629 kernel/trace/trace_syscalls.c perf_trace_buf_submit(rec, size, rctx,
rctx 690 kernel/trace/trace_syscalls.c int rctx;
rctx 712 kernel/trace/trace_syscalls.c rec = perf_trace_buf_alloc(size, NULL, &rctx);
rctx 722 kernel/trace/trace_syscalls.c perf_swevent_put_recursion_context(rctx);
rctx 726 kernel/trace/trace_syscalls.c perf_trace_buf_submit(rec, size, rctx, sys_data->exit_event->event.type,
rctx 1334 kernel/trace/trace_uprobe.c int rctx;
rctx 1351 kernel/trace/trace_uprobe.c entry = perf_trace_buf_alloc(size, NULL, &rctx);
rctx 1372 kernel/trace/trace_uprobe.c perf_trace_buf_submit(entry, size, rctx, call->event.type, 1, regs,
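
On the perf side, rctx is not a request context but a recursion-context index. The kernel/events/internal.h hits (:207-:230) pick one counter per context level (task, softirq, hardirq, NMI) so a software event cannot re-enter itself from the same level; -1 means the level is already active and the event is dropped. Reconstructed from the hits above, so the exact interrupt-level guards are best-effort for this era of the tree:

        #include <linux/preempt.h>
        #include <linux/compiler.h>

        static inline int get_recursion_context(int *recursion)
        {
                int rctx;

                /* index by how deeply interrupted the current cpu is */
                if (in_nmi())
                        rctx = 3;
                else if (in_irq())
                        rctx = 2;
                else if (in_softirq())
                        rctx = 1;
                else
                        rctx = 0;

                if (recursion[rctx])
                        return -1;      /* already inside this level: drop */

                recursion[rctx]++;
                barrier();

                return rctx;
        }

        static inline void put_recursion_context(int *recursion, int rctx)
        {
                barrier();
                recursion[rctx]--;
        }
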
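trace_kprobe.c, trace_syscalls.c and trace_uprobe.c then use that index through the perf trace buffer API: perf_trace_buf_alloc() hands out the per-cpu buffer slot selected by rctx, and perf_trace_buf_submit() both emits the event and releases the recursion context (trace_events.h:638-642). A hedged usage sketch; the entry layout and example_* names are invented, and real callers also initialize entry->ent through the tracing core:

        #include <linux/trace_events.h>
        #include <linux/perf_event.h>
        #include <linux/kernel.h>

        struct example_trace_entry {
                struct trace_entry ent;         /* common trace header */
                unsigned long ip;               /* made-up payload field */
        };

        static void example_perf_event(u16 type, unsigned long ip,
                                       struct pt_regs *regs,
                                       struct hlist_head *head)
        {
                struct example_trace_entry *entry;
                int size = ALIGN(sizeof(*entry), sizeof(u64));
                int rctx;

                /* NULL regs: we do not need a per-rctx pt_regs scratch area */
                entry = perf_trace_buf_alloc(size, NULL, &rctx);
                if (!entry)
                        return;         /* recursion or no buffer: drop */

                entry->ip = ip;
                /* submits the event and puts the recursion context for us */
                perf_trace_buf_submit(entry, size, rctx, type, 1, regs,
                                      head, NULL);
        }

The error paths in the trace_syscalls.c hits (:625, :722) show the other half of the contract: if a caller bails out after a successful alloc without submitting, it must call perf_swevent_put_recursion_context(rctx) itself.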