pd_uinfo 176 drivers/crypto/amcc/crypto4xx_core.c dev->pdr_uinfo = kcalloc(PPC4XX_NUM_PD, sizeof(struct pd_uinfo),
pd_uinfo 199 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[i];
pd_uinfo 205 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->sa_va = &dev->shadow_sa_pool[i].sa;
pd_uinfo 208 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->sr_va = &dev->shadow_sr_pool[i];
pd_uinfo 209 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->sr_pa = dev->shadow_sr_pool_pa +
pd_uinfo 254 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
pd_uinfo 259 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->state = PD_ENTRY_FREE;
pd_uinfo 447 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo,
pd_uinfo 451 drivers/crypto/amcc/crypto4xx_core.c unsigned int first_sd = pd_uinfo->first_sd;
pd_uinfo 463 drivers/crypto/amcc/crypto4xx_core.c last_sd = (first_sd + pd_uinfo->num_sd);
pd_uinfo 488 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo,
pd_uinfo 494 drivers/crypto/amcc/crypto4xx_core.c memcpy(dst, pd_uinfo->sr_va->save_digest,
pd_uinfo 500 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo)
pd_uinfo 503 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->num_gd) {
pd_uinfo 504 drivers/crypto/amcc/crypto4xx_core.c for (i = 0; i < pd_uinfo->num_gd; i++)
pd_uinfo 506 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_gd = 0xffffffff;
pd_uinfo 507 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->num_gd = 0;
pd_uinfo 509 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->num_sd) {
pd_uinfo 510 drivers/crypto/amcc/crypto4xx_core.c for (i = 0; i < pd_uinfo->num_sd; i++)
pd_uinfo 513 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_sd = 0xffffffff;
pd_uinfo 514 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->num_sd = 0;
pd_uinfo 519 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo,
pd_uinfo 526 drivers/crypto/amcc/crypto4xx_core.c req = skcipher_request_cast(pd_uinfo->async_req);
pd_uinfo 528 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->sa_va->sa_command_0.bf.scatter) {
pd_uinfo 529 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
pd_uinfo 532 drivers/crypto/amcc/crypto4xx_core.c dst = pd_uinfo->dest_va;
pd_uinfo 537 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->sa_va->sa_command_0.bf.save_iv == SA_SAVE_IV) {
pd_uinfo 541 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->sr_va->save_iv,
pd_uinfo 545 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_ret_sg_desc(dev, pd_uinfo);
pd_uinfo 547 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->state & PD_ENTRY_BUSY)
pd_uinfo 553 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo)
pd_uinfo 558 drivers/crypto/amcc/crypto4xx_core.c ahash_req = ahash_request_cast(pd_uinfo->async_req);
pd_uinfo 561 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_copy_digest_to_dst(ahash_req->result, pd_uinfo,
pd_uinfo 563 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_ret_sg_desc(dev, pd_uinfo);
pd_uinfo 565 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->state & PD_ENTRY_BUSY)
pd_uinfo 571 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo,
pd_uinfo 574 drivers/crypto/amcc/crypto4xx_core.c struct aead_request *aead_req = container_of(pd_uinfo->async_req,
pd_uinfo 576 drivers/crypto/amcc/crypto4xx_core.c struct scatterlist *dst = pd_uinfo->dest_va;
pd_uinfo 582 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->sa_va->sa_command_0.bf.scatter) {
pd_uinfo 583 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_copy_pkt_to_dst(dev, pd, pd_uinfo,
pd_uinfo 591 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->sa_va->sa_command_0.bf.dir == DIR_OUTBOUND) {
pd_uinfo 593 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_memcpy_from_le32(icv, pd_uinfo->sr_va->save_digest,
pd_uinfo 606 drivers/crypto/amcc/crypto4xx_core.c if (crypto_memneq(icv, pd_uinfo->sr_va->save_digest, cp_len))
pd_uinfo 610 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_ret_sg_desc(dev, pd_uinfo);
pd_uinfo 628 drivers/crypto/amcc/crypto4xx_core.c if (pd_uinfo->state & PD_ENTRY_BUSY)
pd_uinfo 637 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo = &dev->pdr_uinfo[idx];
pd_uinfo 639 drivers/crypto/amcc/crypto4xx_core.c switch (crypto_tfm_alg_type(pd_uinfo->async_req->tfm)) {
pd_uinfo 641 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_cipher_done(dev, pd_uinfo, pd);
pd_uinfo 644 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_aead_done(dev, pd_uinfo, pd);
pd_uinfo 647 drivers/crypto/amcc/crypto4xx_core.c crypto4xx_ahash_done(dev, pd_uinfo);
pd_uinfo 698 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo;
pd_uinfo 811 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo = &dev->pdr_uinfo[pd_entry];
pd_uinfo 812 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->num_gd = num_gd;
pd_uinfo 813 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->num_sd = num_sd;
pd_uinfo 814 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->dest_va = dst;
pd_uinfo 815 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->async_req = req;
pd_uinfo 818 drivers/crypto/amcc/crypto4xx_core.c memcpy(pd_uinfo->sr_va->save_iv, iv, iv_len);
pd_uinfo 820 drivers/crypto/amcc/crypto4xx_core.c sa = pd_uinfo->sa_va;
pd_uinfo 825 drivers/crypto/amcc/crypto4xx_core.c *(u32 *)((unsigned long)sa + offset_to_sr_ptr) = pd_uinfo->sr_pa;
pd_uinfo 833 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_gd = fst_gd;
pd_uinfo 869 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_gd = 0xffffffff;
pd_uinfo 876 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_sd = 0xffffffff;
pd_uinfo 889 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->first_sd = fst_sd;
pd_uinfo 923 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
pd_uinfo 1078 drivers/crypto/amcc/crypto4xx_core.c struct pd_uinfo *pd_uinfo;
pd_uinfo 1084 drivers/crypto/amcc/crypto4xx_core.c pd_uinfo = &core_dev->dev->pdr_uinfo[tail];
pd_uinfo 1086 drivers/crypto/amcc/crypto4xx_core.c if ((pd_uinfo->state & PD_ENTRY_INUSE) &&
pd_uinfo 98 drivers/crypto/amcc/crypto4xx_core.h struct pd_uinfo *pdr_uinfo;
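The references above all revolve around one bookkeeping pattern: the driver keeps an array of per-packet-descriptor entries (allocated with kcalloc at line 176), claims one entry per submitted request in crypto4xx_build_pd (lines 811-923, ending with the PD_ENTRY_INUSE/PD_ENTRY_BUSY state store), and releases it in the completion paths via crypto4xx_ret_sg_desc and the PD_ENTRY_FREE store at line 259. The sketch below is a minimal, self-contained userspace model of that pattern, not the crypto4xx driver code: the struct name pd_uinfo_model, the helper names pd_uinfo_claim/pd_uinfo_release, and the flag values are stand-ins chosen for illustration, while field names such as first_gd, num_sd, dest_va and async_req are taken from the listing.

/*
 * Illustrative model of the pd_uinfo ring bookkeeping suggested by the
 * cross-references above. Flag values are made up for this model; the
 * real driver defines its own.
 */
#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

#define NUM_PD          256     /* stand-in for PPC4XX_NUM_PD */
#define PD_ENTRY_FREE   0
#define PD_ENTRY_INUSE  1
#define PD_ENTRY_BUSY   2       /* extra flag OR'd in when is_busy is set (cf. line 923) */

struct pd_uinfo_model {
	uint32_t state;
	uint32_t first_gd, num_gd;  /* gather descriptors owned by this packet */
	uint32_t first_sd, num_sd;  /* scatter descriptors owned by this packet */
	void *dest_va;              /* destination buffer/scatterlist (opaque here) */
	void *async_req;            /* the crypto request being serviced */
};

static struct pd_uinfo_model *pdr_uinfo;

/* Mirrors the kcalloc() at line 176: one zeroed entry per packet descriptor. */
static int pdr_uinfo_init(void)
{
	pdr_uinfo = calloc(NUM_PD, sizeof(struct pd_uinfo_model));
	return pdr_uinfo ? 0 : -1;
}

/* Claim entry 'idx' for a new request, echoing the stores in the
 * build path around lines 811-923. */
static void pd_uinfo_claim(uint32_t idx, void *req, void *dst,
			   uint32_t fst_gd, uint32_t num_gd,
			   uint32_t fst_sd, uint32_t num_sd, int is_busy)
{
	struct pd_uinfo_model *pd_uinfo = &pdr_uinfo[idx];

	pd_uinfo->async_req = req;
	pd_uinfo->dest_va = dst;
	pd_uinfo->first_gd = num_gd ? fst_gd : 0xffffffff;
	pd_uinfo->num_gd = num_gd;
	pd_uinfo->first_sd = num_sd ? fst_sd : 0xffffffff;
	pd_uinfo->num_sd = num_sd;
	pd_uinfo->state = PD_ENTRY_INUSE | (is_busy ? PD_ENTRY_BUSY : 0);
}

/* Give back the gather/scatter descriptors and mark the entry free again,
 * the pattern of crypto4xx_ret_sg_desc() plus the PD_ENTRY_FREE store
 * at line 259. */
static void pd_uinfo_release(uint32_t idx)
{
	struct pd_uinfo_model *pd_uinfo = &pdr_uinfo[idx];

	pd_uinfo->first_gd = 0xffffffff;
	pd_uinfo->num_gd = 0;
	pd_uinfo->first_sd = 0xffffffff;
	pd_uinfo->num_sd = 0;
	pd_uinfo->state = PD_ENTRY_FREE;
}

int main(void)
{
	if (pdr_uinfo_init())
		return 1;
	pd_uinfo_claim(0, NULL, NULL, 0, 2, 0, 3, 0);
	printf("entry 0 state after claim: %u\n", (unsigned)pdr_uinfo[0].state);
	pd_uinfo_release(0);
	printf("entry 0 state after release: %u\n", (unsigned)pdr_uinfo[0].state);
	free(pdr_uinfo);
	return 0;
}

The completion dispatch at lines 637-647 then only needs the entry index: it looks up pdr_uinfo[idx], switches on the request type, and each *_done helper ends by releasing the entry, which is why every completion path in the listing calls crypto4xx_ret_sg_desc and checks PD_ENTRY_BUSY before signaling completion.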