Searched refs:alignmask (Results 1 - 14 of 14) sorted by relevance

/linux-4.1.27/crypto/
cipher.c
27 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey_unaligned() local
32 absize = keylen + alignmask; setkey_unaligned()
37 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
49 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey() local
57 if ((unsigned long)key & alignmask) setkey()
68 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_crypt_unaligned() local
70 u8 buffer[size + alignmask]; cipher_crypt_unaligned()
71 u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); cipher_crypt_unaligned()
81 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_encrypt_unaligned() local
84 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { cipher_encrypt_unaligned()
95 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_decrypt_unaligned() local
98 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { cipher_decrypt_unaligned()
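
The setkey_unaligned() hits above (and the matching ones in ablkcipher.c, aead.c, ahash.c and blkcipher.c below) all use the same bounce-buffer idiom: when the caller's key pointer is not aligned to the transform's alignmask, the key is copied into a heap buffer that is over-allocated by alignmask bytes and then rounded up to the next (alignmask + 1)-byte boundary with ALIGN(). A minimal user-space sketch of that idiom; ALIGN_UP() and do_setkey_aligned() are illustrative stand-ins for the kernel's ALIGN() macro and the real setkey path, not kernel API:

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>

/* Round x up to the next multiple of a (a must be a power of two),
 * mirroring what the kernel's ALIGN() macro does in the hits above. */
#define ALIGN_UP(x, a) (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

/* Hypothetical stand-in for a setkey routine that requires an aligned key. */
static int do_setkey_aligned(const uint8_t *key, size_t keylen)
{
        printf("aligned key at %p, %zu bytes\n", (const void *)key, keylen);
        return 0;
}

static int setkey_bounce(const uint8_t *key, size_t keylen,
                         unsigned long alignmask)
{
        /* Over-allocate by alignmask bytes so an aligned pointer always
         * fits inside the buffer (absize = keylen + alignmask above). */
        size_t absize = keylen + alignmask;
        uint8_t *buffer = malloc(absize);
        uint8_t *alignbuffer;
        int ret;

        if (!buffer)
                return -1;

        alignbuffer = (uint8_t *)ALIGN_UP((uintptr_t)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        ret = do_setkey_aligned(alignbuffer, keylen);

        /* Wipe the copied key material before releasing the buffer,
         * as the kernel code does before freeing it. */
        memset(alignbuffer, 0, keylen);
        free(buffer);
        return ret;
}

int main(void)
{
        uint8_t key[32] = { 0 };

        /* key + 1 is deliberately misaligned for a 16-byte alignment. */
        return setkey_bounce(key + 1, 16, 15);
}

The over-allocation by alignmask bytes is exactly why absize = keylen + alignmask in the listings: rounding the base pointer up consumes at most alignmask bytes, so an aligned copy of the key always fits.
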
ablkcipher.c
152 unsigned int alignmask, ablkcipher_next_slow()
155 unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); ablkcipher_next_slow()
160 n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1); ablkcipher_next_slow()
161 n += (aligned_bsize * 3 - (alignmask + 1) + ablkcipher_next_slow()
162 (alignmask & ~(crypto_tfm_ctx_alignment() - 1))); ablkcipher_next_slow()
170 dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); ablkcipher_next_slow()
191 unsigned int alignmask) ablkcipher_copy_iv()
195 unsigned aligned_bs = ALIGN(bs, alignmask + 1); ablkcipher_copy_iv()
197 (alignmask + 1); ablkcipher_copy_iv()
200 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); ablkcipher_copy_iv()
205 iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1); ablkcipher_copy_iv()
229 unsigned int alignmask, bsize, n; ablkcipher_walk_next() local
233 alignmask = crypto_tfm_alg_alignmask(tfm); ablkcipher_walk_next()
248 !scatterwalk_aligned(&walk->in, alignmask) || ablkcipher_walk_next()
249 !scatterwalk_aligned(&walk->out, alignmask)) { ablkcipher_walk_next()
250 err = ablkcipher_next_slow(req, walk, bsize, alignmask, ablkcipher_walk_next()
274 unsigned int alignmask; ablkcipher_walk_first() local
276 alignmask = crypto_tfm_alg_alignmask(tfm); ablkcipher_walk_first()
286 if (unlikely(((unsigned long)walk->iv & alignmask))) { ablkcipher_walk_first()
287 int err = ablkcipher_copy_iv(walk, tfm, alignmask); ablkcipher_walk_first()
311 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); setkey_unaligned() local
316 absize = keylen + alignmask; setkey_unaligned()
321 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
333 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); setkey() local
340 if ((unsigned long)key & alignmask) setkey()
149 ablkcipher_next_slow(struct ablkcipher_request *req, struct ablkcipher_walk *walk, unsigned int bsize, unsigned int alignmask, void **src_p, void **dst_p) ablkcipher_next_slow() argument
189 ablkcipher_copy_iv(struct ablkcipher_walk *walk, struct crypto_tfm *tfm, unsigned int alignmask) ablkcipher_copy_iv() argument
cmac.c
57 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_setkey() local
60 __be64 *consts = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_setkey()
110 unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); crypto_cmac_digest_init() local
113 u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs; crypto_cmac_digest_init()
125 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_update() local
130 u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_update()
171 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_final() local
176 u8 *consts = PTR_ALIGN((void *)tctx->ctx, alignmask + 1); crypto_cmac_digest_final()
177 u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_final()
229 unsigned long alignmask; cmac_create() local
260 alignmask = alg->cra_alignmask | (sizeof(long) - 1); cmac_create()
261 inst->alg.base.cra_alignmask = alignmask; cmac_create()
268 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)) cmac_create()
272 ALIGN(sizeof(struct cmac_tfm_ctx), alignmask + 1) cmac_create()
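
cmac (and xcbc below) keep their derived constants and running state in a plain byte array inside the transform/descriptor context, and every accessor recovers the aligned start with PTR_ALIGN(ctx->ctx, alignmask + 1). cmac_create() therefore reserves alignmask extra bytes when sizing the context and ORs the underlying algorithm's cra_alignmask with sizeof(long) - 1, which guarantees at least long alignment for those buffers. A small user-space sketch of the PTR_ALIGN() part; ptr_align() and struct mac_ctx are illustrative stand-ins, not the kernel definitions:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Stand-in for the kernel's PTR_ALIGN(): round a pointer up to the next
 * multiple of a power-of-two alignment. */
static void *ptr_align(void *p, uintptr_t align)
{
        return (void *)(((uintptr_t)p + align - 1) & ~(align - 1));
}

/* Illustrative context: an unaligned byte blob that must hold block-sized
 * state at an aligned offset, loosely modelled on cmac_tfm_ctx. */
struct mac_ctx {
        char          misc;         /* pushes ctx[] off natural alignment */
        unsigned char ctx[64 + 15]; /* payload plus worst-case padding for alignmask 15 */
};

int main(void)
{
        struct mac_ctx c;
        unsigned long alignmask = 15; /* e.g. cra_alignmask | (sizeof(long) - 1) */
        uint64_t k1 = 0x0123456789abcdefULL;
        unsigned char *consts;

        memset(&c, 0, sizeof(c));
        consts = ptr_align(c.ctx, alignmask + 1); /* aligned start inside ctx[] */
        memcpy(consts, &k1, sizeof(k1));
        printf("ctx[] at %p, aligned consts at %p\n",
               (void *)c.ctx, (void *)consts);
        return 0;
}
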
xcbc.c
64 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_setkey() local
67 u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_setkey()
84 unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); crypto_xcbc_digest_init() local
87 u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs; crypto_xcbc_digest_init()
99 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_update() local
104 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_update()
145 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_final() local
150 u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1); crypto_xcbc_digest_final()
151 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_final()
203 unsigned long alignmask; xcbc_create() local
233 alignmask = alg->cra_alignmask | 3; xcbc_create()
234 inst->alg.base.cra_alignmask = alignmask; xcbc_create()
241 (alignmask & xcbc_create()
246 alignmask + 1) + xcbc_create()
ahash.c
45 unsigned int alignmask = walk->alignmask; hash_walk_next() local
56 if (offset & alignmask) { hash_walk_next()
57 unsigned int unaligned = alignmask + 1 - (offset & alignmask); hash_walk_next()
86 unsigned int alignmask = walk->alignmask; crypto_hash_walk_done() local
91 if (nbytes && walk->offset & alignmask && !err) { crypto_hash_walk_done()
92 walk->offset = ALIGN(walk->offset, alignmask + 1); crypto_hash_walk_done()
141 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); crypto_hash_walk_first()
159 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); crypto_ahash_walk_first()
181 walk->alignmask = crypto_hash_alignmask(hdesc->tfm); crypto_hash_walk_first_compat()
191 unsigned long alignmask = crypto_ahash_alignmask(tfm); ahash_setkey_unaligned() local
196 absize = keylen + alignmask; ahash_setkey_unaligned()
201 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); ahash_setkey_unaligned()
211 unsigned long alignmask = crypto_ahash_alignmask(tfm); crypto_ahash_setkey() local
213 if ((unsigned long)key & alignmask) crypto_ahash_setkey()
235 unsigned long alignmask = crypto_ahash_alignmask(tfm); ahash_save_req() local
239 priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), ahash_save_req()
279 req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); ahash_save_req()
354 unsigned long alignmask = crypto_ahash_alignmask(tfm); crypto_ahash_op() local
356 if ((unsigned long)req->result & alignmask) crypto_ahash_op()
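
hash_walk_next() and crypto_hash_walk_done() rely on two small pieces of alignmask arithmetic: the number of bytes from the current offset to the next aligned boundary, alignmask + 1 - (offset & alignmask), and rounding an offset up to that boundary with ALIGN(offset, alignmask + 1). A standalone sketch of both calculations, assuming alignmask + 1 is a power of two (the helper names are local to this example):

#include <stdio.h>

/* Round x up to the next multiple of (alignmask + 1), as ALIGN() does
 * in crypto_hash_walk_done(). */
static unsigned int align_up(unsigned int x, unsigned int alignmask)
{
        return (x + alignmask) & ~alignmask;
}

/* Bytes from offset to the next aligned boundary; this is the "unaligned"
 * count computed in hash_walk_next() when offset & alignmask is non-zero. */
static unsigned int bytes_to_boundary(unsigned int offset, unsigned int alignmask)
{
        return alignmask + 1 - (offset & alignmask);
}

int main(void)
{
        unsigned int alignmask = 7; /* 8-byte alignment */
        unsigned int offsets[] = { 1, 5, 13 };

        for (unsigned int i = 0; i < sizeof(offsets) / sizeof(offsets[0]); i++)
                printf("offset %2u: %u bytes to the boundary, rounds up to %u\n",
                       offsets[i], bytes_to_boundary(offsets[i], alignmask),
                       align_up(offsets[i], alignmask));
        return 0;
}
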
eseqiv.c
176 unsigned long alignmask; eseqiv_init() local
181 alignmask = crypto_tfm_ctx_alignment() - 1; eseqiv_init()
184 if (alignmask & reqsize) { eseqiv_init()
185 alignmask &= reqsize; eseqiv_init()
186 alignmask--; eseqiv_init()
189 alignmask = ~alignmask; eseqiv_init()
190 alignmask &= crypto_ablkcipher_alignmask(geniv); eseqiv_init()
192 reqsize += alignmask; eseqiv_init()
blkcipher.c
78 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); blkcipher_done_slow()
149 unsigned int alignmask) blkcipher_next_slow()
152 unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); blkcipher_next_slow()
161 n = aligned_bsize * 3 - (alignmask + 1) + blkcipher_next_slow()
162 (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); blkcipher_next_slow()
169 alignmask + 1); blkcipher_next_slow()
238 if (!scatterwalk_aligned(&walk->in, walk->alignmask) || blkcipher_walk_next()
239 !scatterwalk_aligned(&walk->out, walk->alignmask)) { blkcipher_walk_next()
253 err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask); blkcipher_walk_next()
278 unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1); blkcipher_copy_iv()
281 (walk->alignmask + 1); blkcipher_copy_iv()
284 size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1); blkcipher_copy_iv()
289 iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); blkcipher_copy_iv()
305 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_virt()
317 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_phys()
334 if (unlikely(((unsigned long)walk->iv & walk->alignmask))) { blkcipher_walk_first()
355 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_virt_block()
369 walk->alignmask = crypto_aead_alignmask(tfm); blkcipher_aead_walk_virt_block()
378 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey_unaligned() local
383 absize = keylen + alignmask; setkey_unaligned()
388 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
399 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey() local
406 if ((unsigned long)key & alignmask) setkey()
146 blkcipher_next_slow(struct blkcipher_desc *desc, struct blkcipher_walk *walk, unsigned int bsize, unsigned int alignmask) blkcipher_next_slow() argument
shash.c
37 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_setkey_unaligned() local
42 absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); shash_setkey_unaligned()
47 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); shash_setkey_unaligned()
58 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_setkey() local
60 if ((unsigned long)key & alignmask) crypto_shash_setkey()
79 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_update_unaligned() local
80 unsigned int unaligned_len = alignmask + 1 - shash_update_unaligned()
81 ((unsigned long)data & alignmask); shash_update_unaligned()
82 u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)] shash_update_unaligned()
84 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); shash_update_unaligned()
103 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_update() local
105 if ((unsigned long)data & alignmask) crypto_shash_update()
115 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_final_unaligned() local
118 u8 ubuf[shash_align_buffer_size(ds, alignmask)] shash_final_unaligned()
120 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); shash_final_unaligned()
138 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_final() local
140 if ((unsigned long)out & alignmask) crypto_shash_final()
159 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_finup() local
161 if (((unsigned long)data | (unsigned long)out) & alignmask) crypto_shash_finup()
180 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_digest() local
182 if (((unsigned long)data | (unsigned long)out) & alignmask) crypto_shash_digest()
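
shash_update_unaligned() only bounces the misaligned head of the data: it computes how many leading bytes reach the next boundary, copies just those into an aligned spot inside an over-sized on-stack buffer (ubuf[] rounded up with PTR_ALIGN), hashes them, and hands the now-aligned remainder to the normal update path. A hedged user-space sketch of that split; process_aligned() and update() are hypothetical stand-ins, not the shash API:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* Hypothetical stand-in for an update path that requires aligned input. */
static void process_aligned(const uint8_t *data, size_t len)
{
        printf("aligned chunk at %p, %zu bytes\n", (const void *)data, len);
}

static void update(const uint8_t *data, size_t len, unsigned long alignmask)
{
        if ((uintptr_t)data & alignmask) {
                /* Leading bytes up to the next boundary go through an aligned
                 * bounce buffer, like ubuf[]/buf in shash_update_unaligned(). */
                size_t head = alignmask + 1 - ((uintptr_t)data & alignmask);
                uint8_t ubuf[head + alignmask]; /* over-sized, like ubuf[] */
                uint8_t *buf = (uint8_t *)(((uintptr_t)ubuf + alignmask) &
                                           ~(uintptr_t)alignmask);

                if (head > len)
                        head = len;
                memcpy(buf, data, head);
                process_aligned(buf, head);
                data += head;
                len -= head;
        }
        if (len)
                process_aligned(data, len); /* remainder starts on a boundary */
}

int main(void)
{
        uint8_t buf[64] = { 0 };

        update(buf + 3, 40, 15); /* start offset typically misaligned for alignmask 15 */
        return 0;
}
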
ctr.c
59 unsigned long alignmask = crypto_cipher_alignmask(tfm); crypto_ctr_crypt_final() local
61 u8 tmp[bsize + alignmask]; crypto_ctr_crypt_final()
62 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); crypto_ctr_crypt_final()
106 unsigned long alignmask = crypto_cipher_alignmask(tfm); crypto_ctr_crypt_inplace() local
110 u8 tmp[bsize + alignmask]; crypto_ctr_crypt_inplace()
111 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); crypto_ctr_crypt_inplace()
aead.c
33 unsigned long alignmask = crypto_aead_alignmask(tfm); setkey_unaligned() local
38 absize = keylen + alignmask; setkey_unaligned()
43 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
54 unsigned long alignmask = crypto_aead_alignmask(tfm); setkey() local
56 if ((unsigned long)key & alignmask) setkey()
testmgr.c
393 unsigned int alignmask; test_hash() local
405 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_hash()
406 if (alignmask) { test_hash()
409 alignmask + 1); test_hash()
801 unsigned int alignmask; test_aead() local
819 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_aead()
820 if (alignmask) { test_aead()
823 alignmask + 1); test_aead()
1154 unsigned int alignmask; test_skcipher() local
1172 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_skcipher()
1173 if (alignmask) { test_skcipher()
1176 alignmask + 1); test_skcipher()
/linux-4.1.27/include/crypto/
scatterwalk.h
79 unsigned int alignmask) scatterwalk_aligned()
81 return !(walk->offset & alignmask); scatterwalk_aligned()
78 scatterwalk_aligned(struct scatter_walk *walk, unsigned int alignmask) scatterwalk_aligned() argument
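
scatterwalk_aligned() is the predicate the blkcipher/ablkcipher walkers above use to decide between the fast path and the slow bounce-buffer path: because an alignmask is always the required power-of-two alignment minus one, offset & alignmask is zero exactly when the offset is aligned. A tiny standalone illustration of that check (the helper name is local to this example):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* The same test scatterwalk_aligned() applies to walk->offset: an alignmask
 * is "required alignment - 1", so masking isolates the low bits that would
 * violate that alignment. */
static bool aligned_to_mask(uintptr_t offset, unsigned int alignmask)
{
        return (offset & alignmask) == 0;
}

int main(void)
{
        unsigned int alignmask = 3; /* 4-byte alignment */

        for (uintptr_t off = 0; off < 8; off++)
                printf("offset %lu: %s\n", (unsigned long)off,
                       aligned_to_mask(off, alignmask) ? "aligned" : "unaligned");
        return 0;
}
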
algapi.h
108 unsigned int alignmask; member in struct:blkcipher_walk
/linux-4.1.27/include/crypto/internal/
hash.h
26 unsigned int alignmask; member in struct:crypto_hash_walk

Completed in 171 milliseconds