Lines Matching refs:alignmask
152 unsigned int alignmask, in ablkcipher_next_slow() argument
155 unsigned aligned_bsize = ALIGN(bsize, alignmask + 1); in ablkcipher_next_slow()
160 n = ALIGN(sizeof(struct ablkcipher_buffer), alignmask + 1); in ablkcipher_next_slow()
161 n += (aligned_bsize * 3 - (alignmask + 1) + in ablkcipher_next_slow()
162 (alignmask & ~(crypto_tfm_ctx_alignment() - 1))); in ablkcipher_next_slow()
170 dst = (u8 *)ALIGN((unsigned long)base, alignmask + 1); in ablkcipher_next_slow()
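The matches at lines 152-170 come from the slow path of the walk: when a block cannot be processed in place, a bounce buffer is over-allocated and its usable start is rounded up with ALIGN() so a slot begins on an (alignmask + 1)-byte boundary no matter where the allocation lands (the real code additionally reserves three such block slots plus context-alignment padding). Below is a minimal userspace sketch of that over-allocate-then-round-up pattern only; the ALIGN() macro mirrors the kernel's round-up semantics, but MY_ALIGNMASK and the buffer layout are illustrative, not the kernel function.

#include <stdint.h>
#include <stdio.h>
#include <stdlib.h>

/* Round x up to the next multiple of a (a must be a power of two). */
#define ALIGN(x, a)  (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

#define MY_ALIGNMASK 15u   /* hypothetical: a cipher requiring 16-byte alignment */

int main(void)
{
	unsigned int bsize = 16;   /* cipher block size */
	unsigned int aligned_bsize = ALIGN(bsize, MY_ALIGNMASK + 1);

	/* Over-allocate so an aligned slot fits wherever malloc() lands. */
	unsigned char *base = malloc(aligned_bsize + MY_ALIGNMASK);
	unsigned char *dst;

	if (!base)
		return 1;

	dst = (unsigned char *)ALIGN((uintptr_t)base, MY_ALIGNMASK + 1);
	printf("base=%p dst=%p offset-in-slot=%lu\n", (void *)base, (void *)dst,
	       (unsigned long)((uintptr_t)dst & MY_ALIGNMASK));

	free(base);
	return 0;
}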
191 unsigned int alignmask) in ablkcipher_copy_iv() argument
195 unsigned aligned_bs = ALIGN(bs, alignmask + 1); in ablkcipher_copy_iv()
197 (alignmask + 1); in ablkcipher_copy_iv()
200 size += alignmask & ~(crypto_tfm_ctx_alignment() - 1); in ablkcipher_copy_iv()
205 iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1); in ablkcipher_copy_iv()
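Lines 191-205 show the IV copy helper: the buffer is allocated with extra slack, its start is rounded up with ALIGN(), and the IV is memcpy'd to the aligned spot (the real function also sizes the buffer for additional block-aligned scratch space next to the IV). A hedged userspace sketch of the basic copy-to-aligned-buffer step, with purely illustrative names:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define ALIGN(x, a)  (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

/*
 * Copy an IV into an over-allocated buffer and return a pointer to the
 * aligned copy.  The caller keeps *raw for freeing later; this loosely
 * mirrors walk->iv_buffer vs. walk->iv in the matched lines.
 */
unsigned char *copy_iv_aligned(const unsigned char *iv, size_t ivsize,
			       uintptr_t alignmask, void **raw)
{
	unsigned char *buf = malloc(ivsize + alignmask);   /* slack for rounding up */
	unsigned char *aligned;

	if (!buf)
		return NULL;

	aligned = (unsigned char *)ALIGN((uintptr_t)buf, alignmask + 1);
	memcpy(aligned, iv, ivsize);

	*raw = buf;       /* to be freed when the walk finishes */
	return aligned;   /* used as the working IV from here on */
}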
229 unsigned int alignmask, bsize, n; in ablkcipher_walk_next() local
233 alignmask = crypto_tfm_alg_alignmask(tfm); in ablkcipher_walk_next()
248 !scatterwalk_aligned(&walk->in, alignmask) || in ablkcipher_walk_next()
249 !scatterwalk_aligned(&walk->out, alignmask)) { in ablkcipher_walk_next()
250 err = ablkcipher_next_slow(req, walk, bsize, alignmask, in ablkcipher_walk_next()
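Lines 229-250 are the per-step walk logic: alignmask is read from the tfm and each step checks whether both the input and output positions satisfy it; if either side is misaligned (or less than a full block is contiguous), the walk falls back to the slow path from lines 152-170. A small sketch of that fast-path/slow-path decision, where process_slow() is a hypothetical stand-in for the bounce-buffer path, not a kernel API:

#include <stdbool.h>
#include <stdint.h>

bool ptr_aligned(const void *p, uintptr_t alignmask)
{
	return ((uintptr_t)p & alignmask) == 0;
}

/*
 * Process one block in place only if both buffers are aligned to
 * alignmask + 1; otherwise defer to a slow path that bounces the data
 * through an aligned temporary buffer.
 */
int next_block(unsigned char *src, unsigned char *dst,
	       unsigned int bsize, uintptr_t alignmask,
	       int (*process_slow)(unsigned char *, unsigned char *, unsigned int))
{
	if (!ptr_aligned(src, alignmask) || !ptr_aligned(dst, alignmask))
		return process_slow(src, dst, bsize);

	/* fast path: the cipher may read and write src/dst directly */
	return 0;
}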
274 unsigned int alignmask; in ablkcipher_walk_first() local
276 alignmask = crypto_tfm_alg_alignmask(tfm); in ablkcipher_walk_first()
286 if (unlikely(((unsigned long)walk->iv & alignmask))) { in ablkcipher_walk_first()
287 int err = ablkcipher_copy_iv(walk, tfm, alignmask); in ablkcipher_walk_first()
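Lines 274-287 apply the same test to the IV before the first step: the IV comes straight from the request, so if its address has any bits set under alignmask it is replaced by an aligned copy (the helper matched at lines 191-205) before the walk begins. A sketch of that check, with copy_aligned() as a hypothetical callback standing in for the copy helper and -1 standing in for -ENOMEM:

#include <stddef.h>
#include <stdint.h>

int walk_first_iv_check(unsigned char **iv, size_t ivsize, uintptr_t alignmask,
			unsigned char *(*copy_aligned)(const unsigned char *, size_t))
{
	if ((uintptr_t)*iv & alignmask) {
		unsigned char *copy = copy_aligned(*iv, ivsize);

		if (!copy)
			return -1;
		*iv = copy;   /* the walk continues with the aligned copy */
	}
	return 0;
}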
311 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); in setkey_unaligned() local
316 absize = keylen + alignmask; in setkey_unaligned()
321 alignbuffer = (u8 *)ALIGN((unsigned long)buffer, alignmask + 1); in setkey_unaligned()
333 unsigned long alignmask = crypto_ablkcipher_alignmask(tfm); in setkey() local
340 if ((unsigned long)key & alignmask) in setkey()
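Lines 311-340 cover the key-setting path: setkey() tests the caller's key pointer against alignmask and, when it is misaligned, setkey_unaligned() over-allocates a buffer by alignmask bytes, rounds the start up with ALIGN(), copies the key there, calls the algorithm's setkey with the aligned copy, then wipes and frees the buffer. A hedged userspace sketch of that pattern, with do_setkey() standing in for the algorithm's setkey hook:

#include <stdint.h>
#include <stdlib.h>
#include <string.h>

#define ALIGN(x, a)  (((x) + (a) - 1) & ~((uintptr_t)(a) - 1))

static int setkey_unaligned_sketch(const unsigned char *key, unsigned int keylen,
				   uintptr_t alignmask,
				   int (*do_setkey)(const unsigned char *, unsigned int))
{
	unsigned char *buffer = malloc(keylen + alignmask);   /* slack for rounding up */
	unsigned char *alignbuffer;
	int ret;

	if (!buffer)
		return -1;   /* -ENOMEM in the kernel code */

	alignbuffer = (unsigned char *)ALIGN((uintptr_t)buffer, alignmask + 1);
	memcpy(alignbuffer, key, keylen);
	ret = do_setkey(alignbuffer, keylen);

	memset(alignbuffer, 0, keylen);   /* do not leave key material behind */
	free(buffer);
	return ret;
}

int setkey_sketch(const unsigned char *key, unsigned int keylen,
		  uintptr_t alignmask,
		  int (*do_setkey)(const unsigned char *, unsigned int))
{
	if ((uintptr_t)key & alignmask)
		return setkey_unaligned_sketch(key, keylen, alignmask, do_setkey);

	return do_setkey(key, keylen);
}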