Searched refs: nx_ctx (Results 1 - 10 of 10) sorted by relevance

/linux-4.1.27/drivers/crypto/nx/
nx-aes-xcbc.c
44 struct nx_crypto_ctx *nx_ctx = crypto_shash_ctx(desc); nx_xcbc_set_key() local
45 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_xcbc_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; nx_xcbc_set_key()
73 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_xcbc_empty() local
74 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_xcbc_empty()
93 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys, &len, nx_xcbc_empty()
94 nx_ctx->ap->sglen); nx_xcbc_empty()
99 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) keys, &len, nx_xcbc_empty()
100 nx_ctx->ap->sglen); nx_xcbc_empty()
105 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_xcbc_empty()
106 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_xcbc_empty()
108 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_xcbc_empty()
112 atomic_inc(&(nx_ctx->stats->aes_ops)); nx_xcbc_empty()
121 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) keys[1], &len, nx_xcbc_empty()
122 nx_ctx->ap->sglen); nx_xcbc_empty()
128 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, nx_xcbc_empty()
129 nx_ctx->ap->sglen); nx_xcbc_empty()
134 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_xcbc_empty()
135 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_xcbc_empty()
137 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_xcbc_empty()
141 atomic_inc(&(nx_ctx->stats->aes_ops)); nx_xcbc_empty()
154 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); nx_crypto_ctx_aes_xcbc_init2() local
155 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_crypto_ctx_aes_xcbc_init2()
162 nx_ctx_init(nx_ctx, HCOP_FC_AES); nx_crypto_ctx_aes_xcbc_init2()
184 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_xcbc_update() local
185 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_xcbc_update()
194 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_xcbc_update()
209 in_sg = nx_ctx->in_sg; nx_xcbc_update()
211 nx_ctx->ap->sglen); nx_xcbc_update()
213 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_xcbc_update()
216 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, nx_xcbc_update()
217 &len, nx_ctx->ap->sglen); nx_xcbc_update()
224 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_xcbc_update()
244 in_sg = nx_build_sg_list(nx_ctx->in_sg, nx_xcbc_update()
265 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * nx_xcbc_update()
277 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { nx_xcbc_update()
282 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_xcbc_update()
287 atomic_inc(&(nx_ctx->stats->aes_ops)); nx_xcbc_update()
295 in_sg = nx_ctx->in_sg; nx_xcbc_update()
303 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_xcbc_update()
310 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_xcbc_final() local
311 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; nx_xcbc_final()
317 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_xcbc_final()
339 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *)sctx->buffer, nx_xcbc_final()
340 &len, nx_ctx->ap->sglen); nx_xcbc_final()
348 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, nx_xcbc_final()
349 nx_ctx->ap->sglen); nx_xcbc_final()
356 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_xcbc_final()
357 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_xcbc_final()
359 if (!nx_ctx->op.outlen) { nx_xcbc_final()
364 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_xcbc_final()
369 atomic_inc(&(nx_ctx->stats->aes_ops)); nx_xcbc_final()
373 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_xcbc_final()
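
A pattern repeated throughout this file (hits 93-106 and 121-135): a buffer is described as an NX scatter/gather list with nx_build_sg_list(), which returns a cursor past the last entry it wrote, and the op length is then recorded as a scaled pointer difference. A standalone userspace model of that accounting, using a hypothetical 16-byte entry layout rather than the kernel's real struct nx_sg:

    #include <stdio.h>
    #include <stdint.h>

    struct nx_sg { uint64_t addr; uint32_t rsvd; uint32_t len; };

    /* Model of nx_build_sg_list(): split [addr, addr+len) into entries of
     * at most per_entry bytes, return the cursor past the last entry. */
    static struct nx_sg *build(struct nx_sg *sg, uint64_t addr,
                               unsigned int len, unsigned int per_entry)
    {
        while (len) {
            unsigned int chunk = len < per_entry ? len : per_entry;
            sg->addr = addr;
            sg->len = chunk;
            addr += chunk;
            len -= chunk;
            sg++;
        }
        return sg;
    }

    int main(void)
    {
        struct nx_sg list[16];
        struct nx_sg *end = build(list, 0x1000, 10000, 4096);

        /* same arithmetic as the op.inlen/op.outlen hits above:
         * element count times sizeof(struct nx_sg) */
        printf("entries=%td bytes=%zu\n",
               end - list, (size_t)(end - list) * sizeof(struct nx_sg));
        return 0;
    }

With 10000 bytes and 4096-byte entries this prints entries=3 bytes=48, i.e. the hardware is handed a 48-byte sg list.
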
nx-aes-cbc.c
37 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); cbc_aes_nx_set_key() local
38 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; cbc_aes_nx_set_key()
40 nx_ctx_init(nx_ctx, HCOP_FC_AES); cbc_aes_nx_set_key()
45 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; cbc_aes_nx_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; cbc_aes_nx_set_key()
53 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; cbc_aes_nx_set_key()
71 struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm); cbc_aes_nx_crypt() local
72 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; cbc_aes_nx_crypt()
77 spin_lock_irqsave(&nx_ctx->lock, irq_flags); cbc_aes_nx_crypt()
87 rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, cbc_aes_nx_crypt()
92 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { cbc_aes_nx_crypt()
97 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, cbc_aes_nx_crypt()
103 atomic_inc(&(nx_ctx->stats->aes_ops)); cbc_aes_nx_crypt()
105 &(nx_ctx->stats->aes_bytes)); cbc_aes_nx_crypt()
110 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); cbc_aes_nx_crypt()
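
The gaps between hits 40, 45, 49 and 53 are the key-length switch that selects the per-key-size properties slot. A sketch of its likely shape, assuming the NX_CPB_SET_KEY_SIZE macro and the aes_cbc CPB field names from nx.h (a reconstruction, not a verbatim quote):

    switch (key_len) {
    case AES_KEYSIZE_128:
        NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_128);
        nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128];
        break;
    case AES_KEYSIZE_192:
        NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_192);
        nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192];
        break;
    case AES_KEYSIZE_256:
        NX_CPB_SET_KEY_SIZE(csbcpb, NX_KS_AES_256);
        nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256];
        break;
    default:
        return -EINVAL;
    }

    csbcpb->cpb.hdr.mode = NX_MODE_AES_CBC;        /* assumed field */
    memcpy(csbcpb->cpb.aes_cbc.key, in_key, key_len);
    return 0;
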
nx-aes-ecb.c
37 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); ecb_aes_nx_set_key() local
38 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; ecb_aes_nx_set_key()
40 nx_ctx_init(nx_ctx, HCOP_FC_AES); ecb_aes_nx_set_key()
45 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; ecb_aes_nx_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; ecb_aes_nx_set_key()
53 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; ecb_aes_nx_set_key()
71 struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm); ecb_aes_nx_crypt() local
72 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ecb_aes_nx_crypt()
77 spin_lock_irqsave(&nx_ctx->lock, irq_flags); ecb_aes_nx_crypt()
87 rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, ecb_aes_nx_crypt()
92 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { ecb_aes_nx_crypt()
97 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, ecb_aes_nx_crypt()
102 atomic_inc(&(nx_ctx->stats->aes_ops)); ecb_aes_nx_crypt()
104 &(nx_ctx->stats->aes_bytes)); ecb_aes_nx_crypt()
110 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); ecb_aes_nx_crypt()
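
cbc_aes_nx_crypt() and ecb_aes_nx_crypt() share the same chunked loop under the context spinlock (hits 77-110 in both files). A condensed sketch of that loop, inferred from the quoted hits, with locals (processed, to_process, iv) assumed and error paths trimmed:

    spin_lock_irqsave(&nx_ctx->lock, irq_flags);
    do {
        to_process = nbytes - processed;

        rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process,
                               processed, iv);    /* iv NULL for ECB */
        if (rc)
            goto out;
        if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) {
            rc = -EINVAL;
            goto out;
        }

        rc = nx_hcall_sync(nx_ctx, &nx_ctx->op,
                           desc->flags & CRYPTO_TFM_REQ_MAY_SLEEP);
        if (rc)
            goto out;

        atomic_inc(&nx_ctx->stats->aes_ops);
        atomic64_add(csbcpb->csb.processed_byte_count,
                     &nx_ctx->stats->aes_bytes);
        processed += to_process;
    } while (processed < nbytes);
    out:
        spin_unlock_irqrestore(&nx_ctx->lock, irq_flags);

nx_build_sg_lists() may shrink to_process to what fits in one hardware pass, which is why the loop keeps going until all of nbytes has been handed to the coprocessor.
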
nx-aes-ccm.c
39 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); ccm_aes_nx_set_key() local
40 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ccm_aes_nx_set_key()
41 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; ccm_aes_nx_set_key()
43 nx_ctx_init(nx_ctx, HCOP_FC_AES); ccm_aes_nx_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; ccm_aes_nx_set_key()
69 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); ccm4309_aes_nx_set_key() local
76 memcpy(nx_ctx->priv.ccm.nonce, in_key + key_len, 3); ccm4309_aes_nx_set_key()
174 struct nx_crypto_ctx *nx_ctx, generate_pat()
179 struct nx_sg *nx_insg = nx_ctx->in_sg; generate_pat()
180 struct nx_sg *nx_outsg = nx_ctx->out_sg; generate_pat()
204 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; generate_pat()
209 b0 = nx_ctx->csbcpb->cpb.aes_ccm.in_pat_or_b0; generate_pat()
210 b1 = nx_ctx->priv.ccm.iauth_tag; generate_pat()
216 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; generate_pat()
217 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; generate_pat()
220 b0 = nx_ctx->csbcpb_aead->cpb.aes_cca.b0; generate_pat()
221 b1 = nx_ctx->csbcpb_aead->cpb.aes_cca.b1; generate_pat()
254 nx_insg = nx_build_sg_list(nx_insg, b1, &len, nx_ctx->ap->sglen); generate_pat()
260 nx_ctx->ap->sglen); generate_pat()
267 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_insg) * generate_pat()
269 nx_ctx->op.outlen = (nx_ctx->out_sg - nx_outsg) * generate_pat()
272 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_ENDE_ENCRYPT; generate_pat()
273 NX_CPB_FDM(nx_ctx->csbcpb) |= NX_FDM_INTERMEDIATE; generate_pat()
275 result = nx_ctx->csbcpb->cpb.aes_ccm.out_pat_or_mac; generate_pat()
277 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, generate_pat()
282 atomic_inc(&(nx_ctx->stats->aes_ops)); generate_pat()
283 atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); generate_pat()
291 max_sg_len = min_t(u64, nx_ctx->ap->sglen, generate_pat()
294 nx_ctx->ap->databytelen/NX_PAGE_SIZE); generate_pat()
298 nx_ctx->ap->databytelen); generate_pat()
300 nx_insg = nx_walk_and_build(nx_ctx->in_sg, generate_pat()
301 nx_ctx->ap->sglen, generate_pat()
306 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= generate_pat()
309 NX_CPB_FDM(nx_ctx->csbcpb_aead) &= generate_pat()
314 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_insg) * generate_pat()
317 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; generate_pat()
319 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead, generate_pat()
324 memcpy(nx_ctx->csbcpb_aead->cpb.aes_cca.b0, generate_pat()
325 nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0, generate_pat()
328 NX_CPB_FDM(nx_ctx->csbcpb_aead) |= NX_FDM_CONTINUATION; generate_pat()
330 atomic_inc(&(nx_ctx->stats->aes_ops)); generate_pat()
332 &(nx_ctx->stats->aes_bytes)); generate_pat()
337 result = nx_ctx->csbcpb_aead->cpb.aes_cca.out_pat_or_b0; generate_pat()
348 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); ccm_nx_decrypt() local
349 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ccm_nx_decrypt()
352 struct nx_ccm_priv *priv = &nx_ctx->priv.ccm; ccm_nx_decrypt()
357 spin_lock_irqsave(&nx_ctx->lock, irq_flags); ccm_nx_decrypt()
366 rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, ccm_nx_decrypt()
383 NX_CPB_FDM(nx_ctx->csbcpb) &= ~NX_FDM_ENDE_ENCRYPT; ccm_nx_decrypt()
385 rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, ccm_nx_decrypt()
391 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, ccm_nx_decrypt()
408 atomic_inc(&(nx_ctx->stats->aes_ops)); ccm_nx_decrypt()
410 &(nx_ctx->stats->aes_bytes)); ccm_nx_decrypt()
418 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); ccm_nx_decrypt()
425 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); ccm_nx_encrypt() local
426 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ccm_nx_encrypt()
433 spin_lock_irqsave(&nx_ctx->lock, irq_flags); ccm_nx_encrypt()
435 rc = generate_pat(desc->info, req, nx_ctx, authsize, nbytes, ccm_nx_encrypt()
453 rc = nx_build_sg_lists(nx_ctx, desc, req->dst, req->src, ccm_nx_encrypt()
459 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, ccm_nx_encrypt()
476 atomic_inc(&(nx_ctx->stats->aes_ops)); ccm_nx_encrypt()
478 &(nx_ctx->stats->aes_bytes)); ccm_nx_encrypt()
490 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); ccm_nx_encrypt()
496 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); ccm4309_aes_nx_encrypt() local
502 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); ccm4309_aes_nx_encrypt()
528 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); ccm4309_aes_nx_decrypt() local
534 memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3); ccm4309_aes_nx_decrypt()
172 generate_pat(u8 *iv, struct aead_request *req, struct nx_crypto_ctx *nx_ctx, unsigned int authsize, unsigned int nbytes, u8 *out) generate_pat() argument
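
Hits 76, 502 and 534 show the rfc4309 salt handling: three nonce bytes are saved at setkey time and spliced back in front of the caller's 8-byte IV on every request. A sketch of that construction (the leading flags byte is an assumption based on the CCM counter-block format, not a quoted line):

    u8 iv[16];

    iv[0] = 3;                                  /* q - 1 for a 4-byte length field */
    memcpy(iv + 1, nx_ctx->priv.ccm.nonce, 3);  /* salt captured at setkey */
    memcpy(iv + 4, req->iv, 8);                 /* per-request IV */
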
nx-sha256.c
34 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); nx_crypto_ctx_sha256_init() local
41 nx_ctx_init(nx_ctx, HCOP_FC_SHA); nx_crypto_ctx_sha256_init()
43 nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA256]; nx_crypto_ctx_sha256_init()
45 NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA256); nx_crypto_ctx_sha256_init()
72 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_sha256_update() local
73 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; nx_sha256_update()
82 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_sha256_update()
99 max_sg_len = min_t(u64, nx_ctx->ap->sglen, nx_sha256_update()
102 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_sha256_update()
105 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, nx_sha256_update()
107 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_sha256_update()
116 struct nx_sg *in_sg = nx_ctx->in_sg; nx_sha256_update()
129 used_sgs = in_sg - nx_ctx->in_sg; nx_sha256_update()
147 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_sha256_update()
160 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { nx_sha256_update()
165 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_sha256_update()
170 atomic_inc(&(nx_ctx->stats->sha256_ops)); nx_sha256_update()
185 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_sha256_update()
192 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_sha256_final() local
193 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; nx_sha256_final()
200 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_sha256_final()
202 max_sg_len = min_t(u64, nx_ctx->ap->sglen, nx_sha256_final()
205 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_sha256_final()
223 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) sctx->buf, nx_sha256_final()
232 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, max_sg_len); nx_sha256_final()
239 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_sha256_final()
240 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_sha256_final()
241 if (!nx_ctx->op.outlen) { nx_sha256_final()
246 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_sha256_final()
251 atomic_inc(&(nx_ctx->stats->sha256_ops)); nx_sha256_final()
253 atomic64_add(sctx->count, &(nx_ctx->stats->sha256_bytes)); nx_sha256_final()
256 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_sha256_final()
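
The two-step clamp at hits 99-102 (repeated in nx-sha512.c and nx.c) bounds the sg list by both the device limit and by how much data a full list of page-sized entries may carry. Reconstructed here, with nx_driver.of.max_sg_len assumed to be the module-global device limit:

    max_sg_len = min_t(u64, nx_ctx->ap->sglen,
                       nx_driver.of.max_sg_len / sizeof(struct nx_sg));
    max_sg_len = min_t(u64, max_sg_len,
                       nx_ctx->ap->databytelen / NX_PAGE_SIZE);
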
nx-sha512.c
33 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); nx_crypto_ctx_sha512_init() local
40 nx_ctx_init(nx_ctx, HCOP_FC_SHA); nx_crypto_ctx_sha512_init()
42 nx_ctx->ap = &nx_ctx->props[NX_PROPS_SHA512]; nx_crypto_ctx_sha512_init()
44 NX_CPB_SET_DIGEST_SIZE(nx_ctx->csbcpb, NX_DS_SHA512); nx_crypto_ctx_sha512_init()
72 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_sha512_update() local
73 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; nx_sha512_update()
82 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_sha512_update()
99 max_sg_len = min_t(u64, nx_ctx->ap->sglen, nx_sha512_update()
102 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_sha512_update()
105 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *)sctx->state, nx_sha512_update()
107 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_sha512_update()
116 struct nx_sg *in_sg = nx_ctx->in_sg; nx_sha512_update()
128 used_sgs = in_sg - nx_ctx->in_sg; nx_sha512_update()
146 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_sha512_update()
164 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { nx_sha512_update()
169 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_sha512_update()
174 atomic_inc(&(nx_ctx->stats->sha512_ops)); nx_sha512_update()
188 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_sha512_update()
195 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&desc->tfm->base); nx_sha512_final() local
196 struct nx_csbcpb *csbcpb = (struct nx_csbcpb *)nx_ctx->csbcpb; nx_sha512_final()
204 spin_lock_irqsave(&nx_ctx->lock, irq_flags); nx_sha512_final()
206 max_sg_len = min_t(u64, nx_ctx->ap->sglen, nx_sha512_final()
209 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_sha512_final()
232 in_sg = nx_build_sg_list(nx_ctx->in_sg, sctx->buf, &len, nx_sha512_final()
241 out_sg = nx_build_sg_list(nx_ctx->out_sg, out, &len, nx_sha512_final()
244 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); nx_sha512_final()
245 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); nx_sha512_final()
247 if (!nx_ctx->op.outlen) { nx_sha512_final()
252 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, nx_sha512_final()
257 atomic_inc(&(nx_ctx->stats->sha512_ops)); nx_sha512_final()
258 atomic64_add(sctx->count[0], &(nx_ctx->stats->sha512_bytes)); nx_sha512_final()
262 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); nx_sha512_final()
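
Hits 116, 128 and 146 outline how nx_sha512_update() (like its SHA-256 counterpart) carries leftovers: the buffered tail from the previous call occupies the head of the sg list, and fresh data only gets the remaining entries. A condensed sketch reconstructed around the quoted lines, with buf_len/data_len/to_process as assumed locals:

    struct nx_sg *in_sg = nx_ctx->in_sg;
    int used_sgs = 0;

    if (buf_len) {
        /* previously buffered partial block goes first */
        data_len = buf_len;
        in_sg = nx_build_sg_list(in_sg, (u8 *)sctx->buf,
                                 &data_len, max_sg_len);
        used_sgs = in_sg - nx_ctx->in_sg;    /* entries consumed */
    }

    /* new data gets only the entries the leftover did not use */
    data_len = to_process - buf_len;
    in_sg = nx_build_sg_list(in_sg, (u8 *)data, &data_len,
                             max_sg_len - used_sgs);
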
nx-aes-ctr.c
38 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); ctr_aes_nx_set_key() local
39 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ctr_aes_nx_set_key()
41 nx_ctx_init(nx_ctx, HCOP_FC_AES); ctr_aes_nx_set_key()
46 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; ctr_aes_nx_set_key()
50 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; ctr_aes_nx_set_key()
54 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; ctr_aes_nx_set_key()
70 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); ctr3686_aes_nx_set_key() local
75 memcpy(nx_ctx->priv.ctr.nonce, ctr3686_aes_nx_set_key()
89 struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm); ctr_aes_nx_crypt() local
90 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; ctr_aes_nx_crypt()
95 spin_lock_irqsave(&nx_ctx->lock, irq_flags); ctr_aes_nx_crypt()
100 rc = nx_build_sg_lists(nx_ctx, desc, dst, src, &to_process, ctr_aes_nx_crypt()
105 if (!nx_ctx->op.inlen || !nx_ctx->op.outlen) { ctr_aes_nx_crypt()
110 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, ctr_aes_nx_crypt()
117 atomic_inc(&(nx_ctx->stats->aes_ops)); ctr_aes_nx_crypt()
119 &(nx_ctx->stats->aes_bytes)); ctr_aes_nx_crypt()
124 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); ctr_aes_nx_crypt()
133 struct nx_crypto_ctx *nx_ctx = crypto_blkcipher_ctx(desc->tfm); ctr3686_aes_nx_crypt() local
136 memcpy(iv, nx_ctx->priv.ctr.nonce, CTR_RFC3686_IV_SIZE); ctr3686_aes_nx_crypt()
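
Hits 75 and 136 show the rfc3686 nonce captured at setkey and restored per request. A sketch of the standard RFC 3686 counter block that construction feeds, assuming the usual 4-byte nonce / 8-byte IV / 4-byte counter split (the layout comes from the RFC, not from a quoted line):

    u8 iv[16];

    memcpy(iv, nx_ctx->priv.ctr.nonce, 4);    /* nonce saved at setkey */
    memcpy(iv + 4, desc->info, 8);            /* per-request IV */
    iv[12] = iv[13] = iv[14] = 0;             /* 32-bit big-endian counter */
    iv[15] = 1;                               /* starts at 1 per RFC 3686 */
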
nx-aes-gcm.c
39 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); gcm_aes_nx_set_key() local
40 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; gcm_aes_nx_set_key()
41 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; gcm_aes_nx_set_key()
43 nx_ctx_init(nx_ctx, HCOP_FC_AES); gcm_aes_nx_set_key()
49 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_128]; gcm_aes_nx_set_key()
54 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_192]; gcm_aes_nx_set_key()
59 nx_ctx->ap = &nx_ctx->props[NX_PROPS_AES_256]; gcm_aes_nx_set_key()
78 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(&tfm->base); gcm4106_aes_nx_set_key() local
79 char *nonce = nx_ctx->priv.gcm.nonce; gcm4106_aes_nx_set_key()
124 static int nx_gca(struct nx_crypto_ctx *nx_ctx, nx_gca() argument
129 struct nx_csbcpb *csbcpb_aead = nx_ctx->csbcpb_aead; nx_gca()
131 struct nx_sg *nx_sg = nx_ctx->in_sg; nx_gca()
147 nx_ctx->ap->sglen); nx_gca()
149 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_gca()
157 nx_ctx->ap->databytelen); nx_gca()
161 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, nx_gca()
169 nx_ctx->op_aead.inlen = (nx_ctx->in_sg - nx_sg) nx_gca()
172 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op_aead, nx_gca()
182 atomic_inc(&(nx_ctx->stats->aes_ops)); nx_gca()
183 atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); nx_gca()
196 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); gmac() local
197 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; gmac()
210 nx_ctx->ap->sglen); gmac()
212 nx_ctx->ap->databytelen/NX_PAGE_SIZE); gmac()
223 nx_ctx->ap->databytelen); gmac()
227 nx_sg = nx_walk_and_build(nx_ctx->in_sg, max_sg_len, gmac()
235 nx_ctx->op.inlen = (nx_ctx->in_sg - nx_sg) gmac()
241 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, gmac()
253 atomic_inc(&(nx_ctx->stats->aes_ops)); gmac()
254 atomic64_add(req->assoclen, &(nx_ctx->stats->aes_bytes)); gmac()
269 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); gcm_empty() local
270 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; gcm_empty()
292 in_sg = nx_build_sg_list(nx_ctx->in_sg, (u8 *) desc->info, gcm_empty()
293 &len, nx_ctx->ap->sglen); gcm_empty()
299 out_sg = nx_build_sg_list(nx_ctx->out_sg, (u8 *) out, &len, gcm_empty()
300 nx_ctx->ap->sglen); gcm_empty()
305 nx_ctx->op.inlen = (nx_ctx->in_sg - in_sg) * sizeof(struct nx_sg); gcm_empty()
306 nx_ctx->op.outlen = (nx_ctx->out_sg - out_sg) * sizeof(struct nx_sg); gcm_empty()
308 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, gcm_empty()
312 atomic_inc(&(nx_ctx->stats->aes_ops)); gcm_empty()
332 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); gcm_aes_nx_crypt() local
334 struct nx_csbcpb *csbcpb = nx_ctx->csbcpb; gcm_aes_nx_crypt()
341 spin_lock_irqsave(&nx_ctx->lock, irq_flags); gcm_aes_nx_crypt()
361 rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad); gcm_aes_nx_crypt()
380 rc = nx_build_sg_lists(nx_ctx, &desc, req->dst, gcm_aes_nx_crypt()
393 rc = nx_hcall_sync(nx_ctx, &nx_ctx->op, gcm_aes_nx_crypt()
406 atomic_inc(&(nx_ctx->stats->aes_ops)); gcm_aes_nx_crypt()
408 &(nx_ctx->stats->aes_bytes)); gcm_aes_nx_crypt()
421 u8 *itag = nx_ctx->priv.gcm.iauth_tag; gcm_aes_nx_crypt()
432 spin_unlock_irqrestore(&nx_ctx->lock, irq_flags); gcm_aes_nx_crypt()
458 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); gcm4106_aes_nx_encrypt() local
461 char *nonce = nx_ctx->priv.gcm.nonce; gcm4106_aes_nx_encrypt()
471 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(req->base.tfm); gcm4106_aes_nx_decrypt() local
474 char *nonce = nx_ctx->priv.gcm.nonce; gcm4106_aes_nx_decrypt()
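
Hits 269-312 and 332-408 imply a three-way dispatch inside gcm_aes_nx_crypt(): a fully empty request degenerates to encrypting the counter block (gcm_empty), AAD-only input becomes a GMAC pass, and anything else hashes the AAD through nx_gca() before the bulk data loop. A condensed sketch of that control flow, with error handling and the bulk loop trimmed (label and locals assumed):

    if (nbytes == 0) {
        if (req->assoclen == 0)
            rc = gcm_empty(req, &desc, enc);   /* no data, no AAD */
        else
            rc = gmac(req, &desc);             /* AAD only */
        goto mac;                              /* straight to tag handling */
    }

    if (req->assoclen)                         /* hash the AAD first */
        rc = nx_gca(nx_ctx, req, csbcpb->cpb.aes_gcm.in_pat_or_aad);
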
nx.c
46 * @nx_ctx: the crypto context handle
53 int nx_hcall_sync(struct nx_crypto_ctx *nx_ctx, nx_hcall_sync() argument
60 atomic_inc(&(nx_ctx->stats->sync_ops)); nx_hcall_sync()
69 atomic_inc(&(nx_ctx->stats->errors)); nx_hcall_sync()
70 atomic_set(&(nx_ctx->stats->last_error), op->hcall_err); nx_hcall_sync()
71 atomic_set(&(nx_ctx->stats->last_error_pid), current->pid); nx_hcall_sync()
258 * @nx_ctx: NX crypto context for the lists we're building
271 int nx_build_sg_lists(struct nx_crypto_ctx *nx_ctx, nx_build_sg_lists() argument
281 struct nx_sg *nx_insg = nx_ctx->in_sg; nx_build_sg_lists()
282 struct nx_sg *nx_outsg = nx_ctx->out_sg; nx_build_sg_lists()
285 max_sg_len = min_t(u64, nx_ctx->ap->sglen, nx_build_sg_lists()
288 nx_ctx->ap->databytelen/NX_PAGE_SIZE); nx_build_sg_lists()
293 *nbytes = min_t(u64, *nbytes, nx_ctx->ap->databytelen); nx_build_sg_lists()
306 nx_ctx->op.inlen = trim_sg_list(nx_ctx->in_sg, nx_insg, delta, nbytes); nx_build_sg_lists()
307 nx_ctx->op.outlen = trim_sg_list(nx_ctx->out_sg, nx_outsg, delta, nbytes); nx_build_sg_lists()
313 * nx_ctx_init - initialize an nx_ctx's vio_pfo_op struct
315 * @nx_ctx: the nx context to initialize
318 void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function) nx_ctx_init() argument
320 spin_lock_init(&nx_ctx->lock); nx_ctx_init()
321 memset(nx_ctx->kmem, 0, nx_ctx->kmem_len); nx_ctx_init()
322 nx_ctx->csbcpb->csb.valid |= NX_CSB_VALID_BIT; nx_ctx_init()
324 nx_ctx->op.flags = function; nx_ctx_init()
325 nx_ctx->op.csbcpb = __pa(nx_ctx->csbcpb); nx_ctx_init()
326 nx_ctx->op.in = __pa(nx_ctx->in_sg); nx_ctx_init()
327 nx_ctx->op.out = __pa(nx_ctx->out_sg); nx_ctx_init()
329 if (nx_ctx->csbcpb_aead) { nx_ctx_init()
330 nx_ctx->csbcpb_aead->csb.valid |= NX_CSB_VALID_BIT; nx_ctx_init()
332 nx_ctx->op_aead.flags = function; nx_ctx_init()
333 nx_ctx->op_aead.csbcpb = __pa(nx_ctx->csbcpb_aead); nx_ctx_init()
334 nx_ctx->op_aead.in = __pa(nx_ctx->in_sg); nx_ctx_init()
335 nx_ctx->op_aead.out = __pa(nx_ctx->out_sg); nx_ctx_init()
591 * @nx_ctx: the crypto api context
595 static int nx_crypto_ctx_init(struct nx_crypto_ctx *nx_ctx, u32 fc, u32 mode) nx_crypto_ctx_init() argument
605 nx_ctx->kmem_len = (5 * NX_PAGE_SIZE) + nx_crypto_ctx_init()
608 nx_ctx->kmem_len = (4 * NX_PAGE_SIZE) + nx_crypto_ctx_init()
611 nx_ctx->kmem = kmalloc(nx_ctx->kmem_len, GFP_KERNEL); nx_crypto_ctx_init()
612 if (!nx_ctx->kmem) nx_crypto_ctx_init()
616 nx_ctx->csbcpb = (struct nx_csbcpb *)(round_up((u64)nx_ctx->kmem, nx_crypto_ctx_init()
618 nx_ctx->in_sg = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE); nx_crypto_ctx_init()
619 nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE); nx_crypto_ctx_init()
622 nx_ctx->csbcpb_aead = nx_crypto_ctx_init()
623 (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg + nx_crypto_ctx_init()
628 nx_ctx->stats = &nx_driver.stats; nx_crypto_ctx_init()
629 memcpy(nx_ctx->props, nx_driver.of.ap[fc][mode], nx_crypto_ctx_init()
689 struct nx_crypto_ctx *nx_ctx = crypto_tfm_ctx(tfm); nx_crypto_ctx_exit() local
691 kzfree(nx_ctx->kmem); nx_crypto_ctx_exit()
692 nx_ctx->csbcpb = NULL; nx_crypto_ctx_exit()
693 nx_ctx->csbcpb_aead = NULL; nx_crypto_ctx_exit()
694 nx_ctx->in_sg = NULL; nx_crypto_ctx_exit()
695 nx_ctx->out_sg = NULL; nx_crypto_ctx_exit()
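
Hits 605-623 show nx_crypto_ctx_init() carving one kmalloc'd region into page-sized pieces, which is why nx_crypto_ctx_exit() above only frees kmem and NULLs the derived pointers. A sketch of the resulting layout; the mode test on the last line is an assumption (only the AEAD modes pay for the fifth page holding the second csbcpb):

    /* kmem, rounded up to a page boundary:
     *   page 0: csbcpb        control/state block for the main op
     *   page 1: in_sg         input scatter/gather list
     *   page 2: out_sg        output scatter/gather list
     *   page 3: csbcpb_aead   AEAD modes only: second control block */
    nx_ctx->csbcpb = (struct nx_csbcpb *)round_up((u64)nx_ctx->kmem,
                                                  NX_PAGE_SIZE);
    nx_ctx->in_sg  = (struct nx_sg *)((u8 *)nx_ctx->csbcpb + NX_PAGE_SIZE);
    nx_ctx->out_sg = (struct nx_sg *)((u8 *)nx_ctx->in_sg + NX_PAGE_SIZE);
    if (mode == NX_MODE_AES_CCM || mode == NX_MODE_AES_GCM)    /* assumed */
        nx_ctx->csbcpb_aead =
            (struct nx_csbcpb *)((u8 *)nx_ctx->out_sg + NX_PAGE_SIZE);
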
nx.h
161 void nx_ctx_init(struct nx_crypto_ctx *nx_ctx, unsigned int function);

Completed in 100 milliseconds