Searched refs:alignmask (Results 1 - 15 of 15) sorted by relevance

/linux-4.4.14/crypto/
cipher.c
27 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey_unaligned() local
32 absize = keylen + alignmask; setkey_unaligned()
37 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
49 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey() local
57 if ((unsigned long)key & alignmask) setkey()
68 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_crypt_unaligned() local
70 u8 buffer[size + alignmask]; cipher_crypt_unaligned()
71 u8 *tmp = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); cipher_crypt_unaligned()
81 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_encrypt_unaligned() local
84 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { cipher_encrypt_unaligned()
95 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); cipher_decrypt_unaligned() local
98 if (unlikely(((unsigned long)dst | (unsigned long)src) & alignmask)) { cipher_decrypt_unaligned()
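These cipher.c hits are the basic alignmask bounce-buffer pattern: when the caller's key or data pointer is not aligned to (alignmask + 1), the code over-allocates by alignmask bytes and rounds the pointer up with ALIGN(). Below is a minimal userspace sketch of the same arithmetic; ALIGN_UP and copy_key_aligned are re-creations for illustration, not the kernel helpers.

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* Re-creation of the kernel's ALIGN(): round x up to the power-of-two size a. */
    #define ALIGN_UP(x, a) (((x) + ((uintptr_t)(a) - 1)) & ~((uintptr_t)(a) - 1))

    /*
     * Mimics the setkey_unaligned() idea: over-allocate by alignmask bytes,
     * round the pointer up to an (alignmask + 1)-byte boundary, then copy the
     * key in.  The original allocation is returned through *to_free so it, and
     * not the aligned pointer, is what gets passed to free().
     */
    static uint8_t *copy_key_aligned(const uint8_t *key, size_t keylen,
                                     unsigned long alignmask, void **to_free)
    {
        size_t absize = keylen + alignmask;      /* worst-case padding */
        uint8_t *buffer = malloc(absize);
        uint8_t *alignbuffer;

        if (!buffer)
            return NULL;
        alignbuffer = (uint8_t *)ALIGN_UP((uintptr_t)buffer, alignmask + 1);
        memcpy(alignbuffer, key, keylen);
        *to_free = buffer;
        return alignbuffer;
    }

    int main(void)
    {
        uint8_t key[16] = { 0 };
        void *raw = NULL;
        unsigned long alignmask = 15;            /* e.g. a 16-byte requirement */
        uint8_t *aligned = copy_key_aligned(key + 1, sizeof(key) - 1,
                                            alignmask, &raw);

        if (aligned) {
            printf("aligned copy at %p (mask %#lx)\n", (void *)aligned, alignmask);
            free(raw);
        }
        return 0;
    }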
ablkcipher.c
152 unsigned int alignmask, ablkcipher_next_slow()
155 unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); ablkcipher_next_slow()
160 n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1); ablkcipher_next_slow()
161 n += (aligned_bsize * 3 - (alignmask + 1) + ablkcipher_next_slow()
162 (alignmask & ~(crypto_tfm_ctx_alignment() - 1))); ablkcipher_next_slow()
170 dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); ablkcipher_next_slow()
191 unsigned int alignmask) ablkcipher_copy_iv()
195 unsigned aligned_bs = ALIGN(bs, alignmask + 1); ablkcipher_copy_iv()
197 (alignmask + 1); ablkcipher_copy_iv()
200 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); ablkcipher_copy_iv()
205 iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1); ablkcipher_copy_iv()
229 unsigned int alignmask, bsize, n; ablkcipher_walk_next() local
233 alignmask = crypto_tfm_alg_alignmask(tfm); ablkcipher_walk_next()
248 !scatterwalk_aligned(&walk->in, alignmask) || ablkcipher_walk_next()
249 !scatterwalk_aligned(&walk->out, alignmask)) { ablkcipher_walk_next()
250 err = ablkcipher_next_slow(req, walk, bsize, alignmask, ablkcipher_walk_next()
274 unsigned int alignmask; ablkcipher_walk_first() local
276 alignmask = crypto_tfm_alg_alignmask(tfm); ablkcipher_walk_first()
286 if (unlikely(((unsigned long)walk->iv & alignmask))) { ablkcipher_walk_first()
287 int err = ablkcipher_copy_iv(walk, tfm, alignmask); ablkcipher_walk_first()
311 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); setkey_unaligned() local
316 absize = keylen + alignmask; setkey_unaligned()
321 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
333 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); setkey() local
340 if ((unsigned long)key & alignmask) setkey()
149 ablkcipher_next_slow(struct ablkcipher_request *req, struct ablkcipher_walk *walk, unsigned int bsize, unsigned int alignmask, void **src_p, void **dst_p) ablkcipher_next_slow() argument
189 ablkcipher_copy_iv(struct ablkcipher_walk *walk, struct crypto_tfm *tfm, unsigned int alignmask) ablkcipher_copy_iv() argument
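The ablkcipher.c walk takes a slow path whenever the scatterlist data or the IV is misaligned: block and IV sizes are rounded up with ALIGN(bsize, alignmask + 1) and extra slack is reserved so the payload can still be re-aligned inside a kmalloc'd scratch buffer. A hedged sketch of just that size arithmetic (the ctx-alignment value and names here are illustrative):

    #include <stdio.h>

    /* Round len up to the next multiple of the power-of-two value align. */
    static unsigned int align_up(unsigned int len, unsigned int align)
    {
        return (len + align - 1) & ~(align - 1);
    }

    int main(void)
    {
        unsigned int alignmask = 7;    /* cipher wants 8-byte aligned data        */
        unsigned int bsize = 16;       /* cipher block size                       */
        unsigned int ctx_align = 8;    /* stand-in for crypto_tfm_ctx_alignment() */

        /* aligned_bsize as computed in ablkcipher_next_slow()/_copy_iv() */
        unsigned int aligned_bsize = align_up(bsize, alignmask + 1);

        /* extra slack so the payload can still be re-aligned inside the
         * allocated block, mirroring alignmask & ~(ctx_align - 1) above */
        unsigned int slack = alignmask & ~(ctx_align - 1);

        printf("aligned_bsize=%u slack=%u\n", aligned_bsize, slack);
        return 0;
    }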
eseqiv.c
153 unsigned long alignmask; eseqiv_init() local
159 alignmask = crypto_tfm_ctx_alignment() - 1; eseqiv_init()
162 if (alignmask & reqsize) { eseqiv_init()
163 alignmask &= reqsize; eseqiv_init()
164 alignmask--; eseqiv_init()
167 alignmask = ~alignmask; eseqiv_init()
168 alignmask &= crypto_ablkcipher_alignmask(geniv); eseqiv_init()
170 reqsize += alignmask; eseqiv_init()
xcbc.c
64 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_setkey() local
67 u8 *consts = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_setkey()
84 unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); crypto_xcbc_digest_init() local
87 u8 *prev = PTR_ALIGN(&ctx->ctx[0], alignmask + 1) + bs; crypto_xcbc_digest_init()
99 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_update() local
104 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_update()
145 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_xcbc_digest_final() local
150 u8 *consts = PTR_ALIGN(&tctx->ctx[0], alignmask + 1); crypto_xcbc_digest_final()
151 u8 *odds = PTR_ALIGN(&ctx->ctx[0], alignmask + 1); crypto_xcbc_digest_final()
203 unsigned long alignmask; xcbc_create() local
233 alignmask = alg->cra_alignmask | 3; xcbc_create()
234 inst->alg.base.cra_alignmask = alignmask; xcbc_create()
241 (alignmask & xcbc_create()
246 alignmask + 1) + xcbc_create()
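xcbc.c carves its per-transform scratch area out of ctx->ctx[] with PTR_ALIGN(..., alignmask + 1), and xcbc_create() widens the instance mask with alg->cra_alignmask | 3 so those buffers end up at least 32-bit aligned. A small userspace sketch of both ideas (PTR_ALIGN_UP is a re-creation and the struct is a made-up stand-in):

    #include <stdint.h>
    #include <stdio.h>

    /* Re-creation of the kernel's PTR_ALIGN(): round a pointer up to align bytes. */
    #define PTR_ALIGN_UP(p, align) \
        ((void *)(((uintptr_t)(p) + (align) - 1) & ~((uintptr_t)(align) - 1)))

    struct fake_xcbc_ctx {        /* illustrative stand-in, not the kernel struct */
        uint8_t ctx[64];          /* scratch area: consts, odds, prev + padding   */
    };

    int main(void)
    {
        struct fake_xcbc_ctx c;
        unsigned long cipher_mask = 0;              /* whatever the cipher declares */
        unsigned long alignmask = cipher_mask | 3;  /* force at least 4-byte access */

        uint8_t *consts = PTR_ALIGN_UP(&c.ctx[0], alignmask + 1);

        printf("ctx at %p, consts at %p (mask %#lx)\n",
               (void *)c.ctx, (void *)consts, alignmask);
        return 0;
    }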
cmac.c
57 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_setkey() local
60 __be64 *consts = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_setkey()
110 unsigned long alignmask = crypto_shash_alignmask(pdesc->tfm); crypto_cmac_digest_init() local
113 u8 *prev = PTR_ALIGN((void *)ctx->ctx, alignmask + 1) + bs; crypto_cmac_digest_init()
125 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_update() local
130 u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_update()
171 unsigned long alignmask = crypto_shash_alignmask(parent); crypto_cmac_digest_final() local
176 u8 *consts = PTR_ALIGN((void *)tctx->ctx, alignmask + 1); crypto_cmac_digest_final()
177 u8 *odds = PTR_ALIGN((void *)ctx->ctx, alignmask + 1); crypto_cmac_digest_final()
229 unsigned long alignmask; cmac_create() local
260 alignmask = alg->cra_alignmask | (sizeof(long) - 1); cmac_create()
261 inst->alg.base.cra_alignmask = alignmask; cmac_create()
268 + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)) cmac_create()
272 ALIGN(sizeof(struct cmac_tfm_ctx), alignmask + 1) cmac_create()
ahash.c
45 unsigned int alignmask = walk->alignmask; hash_walk_next() local
56 if (offset & alignmask) { hash_walk_next()
57 unsigned int unaligned = alignmask + 1 - (offset & alignmask); hash_walk_next()
86 unsigned int alignmask = walk->alignmask; crypto_hash_walk_done() local
91 if (nbytes && walk->offset & alignmask && !err) { crypto_hash_walk_done()
92 walk->offset = ALIGN(walk->offset, alignmask + 1); crypto_hash_walk_done()
141 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); crypto_hash_walk_first()
159 walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req)); crypto_ahash_walk_first()
181 walk->alignmask = crypto_hash_alignmask(hdesc->tfm); crypto_hash_walk_first_compat()
191 unsigned long alignmask = crypto_ahash_alignmask(tfm); ahash_setkey_unaligned() local
196 absize = keylen + alignmask; ahash_setkey_unaligned()
201 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); ahash_setkey_unaligned()
211 unsigned long alignmask = crypto_ahash_alignmask(tfm); crypto_ahash_setkey() local
213 if ((unsigned long)key & alignmask) crypto_ahash_setkey()
235 unsigned long alignmask = crypto_ahash_alignmask(tfm); ahash_save_req() local
239 priv = kmalloc(sizeof(*priv) + ahash_align_buffer_size(ds, alignmask), ahash_save_req()
279 req->result = PTR_ALIGN((u8 *)priv->ubuf, alignmask + 1); ahash_save_req()
354 unsigned long alignmask = crypto_ahash_alignmask(tfm); crypto_ahash_op() local
356 if ((unsigned long)req->result & alignmask) crypto_ahash_op()
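The ahash.c walk hands out a short first chunk whenever the current offset is misaligned, so every later chunk starts on an (alignmask + 1) boundary; the key expression is alignmask + 1 - (offset & alignmask). A tiny sketch of that computation:

    #include <stdio.h>

    /* Bytes to hand out now so the next chunk starts on an aligned boundary,
     * mirroring alignmask + 1 - (offset & alignmask) in hash_walk_next(). */
    static unsigned int bytes_until_aligned(unsigned int offset,
                                            unsigned int alignmask)
    {
        return alignmask + 1 - (offset & alignmask);
    }

    int main(void)
    {
        unsigned int alignmask = 3;   /* driver wants 4-byte aligned input */
        unsigned int offset = 6;      /* current offset into the page      */

        if (offset & alignmask)
            printf("feed %u byte(s) first, then continue aligned\n",
                   bytes_until_aligned(offset, alignmask));
        return 0;
    }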
shash.c
37 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_setkey_unaligned() local
42 absize = keylen + (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); shash_setkey_unaligned()
47 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); shash_setkey_unaligned()
58 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_setkey() local
60 if ((unsigned long)key & alignmask) crypto_shash_setkey()
79 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_update_unaligned() local
80 unsigned int unaligned_len = alignmask + 1 - shash_update_unaligned()
81 ((unsigned long)data & alignmask); shash_update_unaligned()
82 u8 ubuf[shash_align_buffer_size(unaligned_len, alignmask)] shash_update_unaligned()
84 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); shash_update_unaligned()
103 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_update() local
105 if ((unsigned long)data & alignmask) crypto_shash_update()
115 unsigned long alignmask = crypto_shash_alignmask(tfm); shash_final_unaligned() local
118 u8 ubuf[shash_align_buffer_size(ds, alignmask)] shash_final_unaligned()
120 u8 *buf = PTR_ALIGN(&ubuf[0], alignmask + 1); shash_final_unaligned()
138 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_final() local
140 if ((unsigned long)out & alignmask) crypto_shash_final()
159 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_finup() local
161 if (((unsigned long)data | (unsigned long)out) & alignmask) crypto_shash_finup()
180 unsigned long alignmask = crypto_shash_alignmask(tfm); crypto_shash_digest() local
182 if (((unsigned long)data | (unsigned long)out) & alignmask) crypto_shash_digest()
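shash.c handles misaligned updates by bouncing only the leading unaligned bytes through an aligned on-stack buffer and then continuing with the now-aligned remainder. A userspace sketch of that split, with a made-up fake_update() standing in for the real shash update hook:

    #include <stdint.h>
    #include <stdio.h>
    #include <string.h>

    #define PTR_ALIGN_UP(p, align) \
        ((void *)(((uintptr_t)(p) + (align) - 1) & ~((uintptr_t)(align) - 1)))

    /* Stand-in for the aligned fast-path update; just reports what it was given. */
    static void fake_update(const uint8_t *data, size_t len)
    {
        printf("update %zu bytes at %p\n", len, (const void *)data);
    }

    /* Mirrors the shash_update_unaligned() idea; in the kernel this path is
     * only entered when data is actually misaligned. */
    static void update_unaligned(const uint8_t *data, size_t len,
                                 unsigned long alignmask)
    {
        size_t unaligned_len = alignmask + 1 - ((uintptr_t)data & alignmask);
        uint8_t ubuf[128];                        /* generously sized scratch  */
        uint8_t *buf = PTR_ALIGN_UP(ubuf, alignmask + 1);

        if (unaligned_len > len)
            unaligned_len = len;
        memcpy(buf, data, unaligned_len);
        fake_update(buf, unaligned_len);          /* leading misaligned piece  */
        if (len > unaligned_len)                  /* aligned remainder         */
            fake_update(data + unaligned_len, len - unaligned_len);
    }

    int main(void)
    {
        uint8_t msg[32] = { 0 };
        update_unaligned(msg + 1, sizeof(msg) - 1, 7); /* deliberately offset */
        return 0;
    }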
blkcipher.c
79 addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); blkcipher_done_slow()
150 unsigned int alignmask) blkcipher_next_slow()
153 unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); blkcipher_next_slow()
162 n = aligned_bsize * 3 - (alignmask + 1) + blkcipher_next_slow()
163 (alignmask & ~(crypto_tfm_ctx_alignment() - 1)); blkcipher_next_slow()
170 alignmask + 1); blkcipher_next_slow()
239 if (!scatterwalk_aligned(&walk->in, walk->alignmask) || blkcipher_walk_next()
240 !scatterwalk_aligned(&walk->out, walk->alignmask)) { blkcipher_walk_next()
254 err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask); blkcipher_walk_next()
279 unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1); blkcipher_copy_iv()
282 (walk->alignmask + 1); blkcipher_copy_iv()
285 size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1); blkcipher_copy_iv()
290 iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1); blkcipher_copy_iv()
306 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_virt()
318 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_phys()
335 if (unlikely(((unsigned long)walk->iv & walk->alignmask))) { blkcipher_walk_first()
356 walk->alignmask = crypto_blkcipher_alignmask(desc->tfm); blkcipher_walk_virt_block()
370 walk->alignmask = crypto_aead_alignmask(tfm); blkcipher_aead_walk_virt_block()
379 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey_unaligned() local
384 absize = keylen + alignmask; setkey_unaligned()
389 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
400 unsigned long alignmask = crypto_tfm_alg_alignmask(tfm); setkey() local
407 if ((unsigned long)key & alignmask) setkey()
147 blkcipher_next_slow(struct blkcipher_desc *desc, struct blkcipher_walk *walk, unsigned int bsize, unsigned int alignmask) blkcipher_next_slow() argument
keywrap.c
143 unsigned long alignmask = max_t(unsigned long, SEMIBSIZE, crypto_kw_decrypt() local
147 u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask]; crypto_kw_decrypt()
149 PTR_ALIGN(blockbuf + 0, alignmask + 1); crypto_kw_decrypt()
174 u8 tbe_buffer[SEMIBSIZE + alignmask]; crypto_kw_decrypt()
176 u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1); crypto_kw_decrypt()
228 unsigned long alignmask = max_t(unsigned long, SEMIBSIZE, crypto_kw_encrypt() local
232 u8 blockbuf[sizeof(struct crypto_kw_block) + alignmask]; crypto_kw_encrypt()
234 PTR_ALIGN(blockbuf + 0, alignmask + 1); crypto_kw_encrypt()
263 u8 tbe_buffer[SEMIBSIZE + alignmask]; crypto_kw_encrypt()
264 u8 *tbe = PTR_ALIGN(tbe_buffer + 0, alignmask + 1); crypto_kw_encrypt()
ctr.c
59 unsigned long alignmask = crypto_cipher_alignmask(tfm); crypto_ctr_crypt_final() local
61 u8 tmp[bsize + alignmask]; crypto_ctr_crypt_final()
62 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); crypto_ctr_crypt_final()
106 unsigned long alignmask = crypto_cipher_alignmask(tfm); crypto_ctr_crypt_inplace() local
110 u8 tmp[bsize + alignmask]; crypto_ctr_crypt_inplace()
111 u8 *keystream = PTR_ALIGN(tmp + 0, alignmask + 1); crypto_ctr_crypt_inplace()
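ctr.c builds its temporary keystream buffer on the stack by over-sizing the array by alignmask bytes and aligning the working pointer with PTR_ALIGN, the same trick keywrap.c uses for its block buffers. A minimal sketch (PTR_ALIGN_UP re-created here):

    #include <stdint.h>
    #include <stdio.h>

    #define PTR_ALIGN_UP(p, align) \
        ((void *)(((uintptr_t)(p) + (align) - 1) & ~((uintptr_t)(align) - 1)))

    int main(void)
    {
        unsigned long alignmask = 15;  /* cipher wants 16-byte aligned buffers */
        unsigned int bsize = 16;       /* block size, for example              */

        /* Over-size the stack array by alignmask so an aligned bsize-byte
         * window always fits, then align the working pointer, as ctr.c does
         * for its keystream.  VLAs were still common in 4.4-era crypto code. */
        uint8_t tmp[bsize + alignmask];
        uint8_t *keystream = PTR_ALIGN_UP(tmp, alignmask + 1);

        printf("tmp=%p keystream=%p\n", (void *)tmp, (void *)keystream);
        return 0;
    }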
aead.c
34 unsigned long alignmask = crypto_aead_alignmask(tfm); setkey_unaligned() local
39 absize = keylen + alignmask; setkey_unaligned()
44 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); setkey_unaligned()
55 unsigned long alignmask = crypto_aead_alignmask(tfm); crypto_aead_setkey() local
57 if ((unsigned long)key & alignmask) crypto_aead_setkey()
testmgr.c
403 unsigned int alignmask; test_hash() local
415 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_hash()
416 if (alignmask) { test_hash()
419 alignmask + 1); test_hash()
820 unsigned int alignmask; test_aead() local
838 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_aead()
839 if (alignmask) { test_aead()
842 alignmask + 1); test_aead()
1186 unsigned int alignmask; test_skcipher() local
1204 alignmask = crypto_tfm_alg_alignmask(&tfm->base); test_skcipher()
1205 if (alignmask) { test_skcipher()
1208 alignmask + 1); test_skcipher()
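The testmgr.c hits re-run the test vectors with the data offset by alignmask + 1 from a page-aligned buffer, i.e. with exactly the alignment the algorithm advertises and nothing stricter, the idea being to catch drivers whose real requirement exceeds their declared cra_alignmask. A hedged userspace sketch of producing such a pointer (the helper is illustrative, not kernel code):

    #define _POSIX_C_SOURCE 200112L
    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    /* Return a pointer aligned to (alignmask + 1) but to nothing stricter, by
     * offsetting into a page-aligned allocation. */
    static uint8_t *minimally_aligned(size_t len, unsigned int alignmask,
                                      void **to_free)
    {
        void *page = NULL;
        unsigned int offset = alignmask + 1;

        if (posix_memalign(&page, 4096, len + offset))
            return NULL;
        *to_free = page;
        return (uint8_t *)page + offset;
    }

    int main(void)
    {
        void *raw = NULL;
        unsigned int alignmask = 15;
        uint8_t *buf = minimally_aligned(64, alignmask, &raw);

        if (buf) {
            printf("buffer at %p: aligned to %u, but not to %u\n",
                   (void *)buf, alignmask + 1, 2 * (alignmask + 1));
            free(raw);
        }
        return 0;
    }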
/linux-4.4.14/include/crypto/
scatterwalk.h
71 unsigned int alignmask) scatterwalk_aligned()
73 return !(walk->offset & alignmask); scatterwalk_aligned()
70 scatterwalk_aligned(struct scatter_walk *walk, unsigned int alignmask) scatterwalk_aligned() argument
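scatterwalk_aligned() is the one-line predicate the walk code above keys off: a position passes when none of the low bits selected by alignmask are set. The same test, standalone:

    #include <stdio.h>

    /* Same test as scatterwalk_aligned(): true when no bit covered by the
     * mask is set in the current offset. */
    static int offset_is_aligned(unsigned int offset, unsigned int alignmask)
    {
        return !(offset & alignmask);
    }

    int main(void)
    {
        printf("%d %d\n",
               offset_is_aligned(32, 15),   /* 1: 32 is a multiple of 16 */
               offset_is_aligned(36, 15));  /* 0: 36 & 15 == 4           */
        return 0;
    }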
algapi.h
111 unsigned int alignmask; member in struct:blkcipher_walk
/linux-4.4.14/include/crypto/internal/
hash.h
26 unsigned int alignmask; member in struct:crypto_hash_walk

Completed in 242 milliseconds