Searched refs: PADLOCK_ALIGNMENT (results 1 – 4 of 4, sorted by relevance)
48   } __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
59   __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
61   __attribute__ ((__aligned__(PADLOCK_ALIGNMENT)));
87   unsigned long align = PADLOCK_ALIGNMENT;  in aes_ctx_common()
214  u8 buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];  in ecb_crypt_copy()
215  u8 *tmp = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in ecb_crypt_copy()
228  u8 buf[AES_BLOCK_SIZE * (MAX_CBC_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];  in cbc_crypt_copy()
229  u8 *tmp = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in cbc_crypt_copy()
329  .cra_alignmask = PADLOCK_ALIGNMENT - 1,
407  .cra_alignmask = PADLOCK_ALIGNMENT - 1,
[all …]
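
Note: these hits are from the VIA PadLock AES driver (drivers/crypto/padlock-aes.c). The PadLock unit needs its control word and data buffers 16-byte aligned, so the driver tags its context struct with __aligned__(PADLOCK_ALIGNMENT), over-allocates temporary stack buffers by PADLOCK_ALIGNMENT - 1 bytes and rounds the pointer up with PTR_ALIGN(), and advertises the requirement to the crypto core via .cra_alignmask = PADLOCK_ALIGNMENT - 1. Below is a minimal user-space sketch of that over-allocate-and-round-up idiom; ALIGN/PTR_ALIGN are stand-ins mirroring the kernel macros, and MAX_ECB_FETCH_BLOCKS is an illustrative value, not taken from the driver.

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

/* User-space stand-ins for the kernel helpers, assumed for this sketch. */
#define PADLOCK_ALIGNMENT 16
#define ALIGN(x, a)     (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))
#define PTR_ALIGN(p, a) ((typeof(p))ALIGN((uintptr_t)(p), (a)))

#define AES_BLOCK_SIZE       16
#define MAX_ECB_FETCH_BLOCKS 8  /* illustrative value only */

int main(void)
{
	/*
	 * Over-allocate by PADLOCK_ALIGNMENT - 1 bytes so that somewhere in
	 * buf[] there is a 16-byte aligned region big enough for the blocks
	 * being processed, then round the pointer up to that boundary.
	 */
	uint8_t buf[AES_BLOCK_SIZE * (MAX_ECB_FETCH_BLOCKS - 1) + PADLOCK_ALIGNMENT - 1];
	uint8_t *tmp = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

	assert(((uintptr_t)tmp & (PADLOCK_ALIGNMENT - 1)) == 0);
	printf("buf=%p tmp=%p\n", (void *)buf, (void *)tmp);
	return 0;
}
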
85   char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__  in padlock_sha1_finup()
87   char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in padlock_sha1_finup()
151  char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__  in padlock_sha256_finup()
153  char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in padlock_sha256_finup()
312  u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__  in padlock_sha1_update_nano()
314  u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in padlock_sha1_update_nano()
401  u8 buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__  in padlock_sha256_update_nano()
403  u8 *dst = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in padlock_sha256_update_nano()
93   char buf[16 + PADLOCK_ALIGNMENT - STACK_ALIGN] __attribute__  in via_rng_data_present()
95   u32 *via_rng_datum = (u32 *)PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);  in via_rng_data_present()
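
Note: the SHA helpers (padlock_sha*_finup() and padlock_sha*_update_nano() in drivers/crypto/padlock-sha.c) and the VIA hardware RNG driver (via_rng_data_present()) use the same idiom, but size the slack as PADLOCK_ALIGNMENT - STACK_ALIGN rather than PADLOCK_ALIGNMENT - 1: the buffer itself is declared with an alignment attribute of STACK_ALIGN (the attribute is cut off in the hits above), so at most PADLOCK_ALIGNMENT - STACK_ALIGN filler bytes are ever needed to reach the next 16-byte boundary. A small sketch of that arithmetic, with STACK_ALIGN assumed to be 4 for illustration (the driver selects it per architecture):

#include <assert.h>
#include <stdint.h>
#include <stdio.h>

#define PADLOCK_ALIGNMENT 16
#define STACK_ALIGN       4  /* assumed 32-bit value, for illustration */
#define ALIGN(x, a)     (((x) + ((a) - 1)) & ~((uintptr_t)(a) - 1))
#define PTR_ALIGN(p, a) ((typeof(p))ALIGN((uintptr_t)(p), (a)))

int main(void)
{
	/*
	 * The array is forced to at least STACK_ALIGN alignment, so rounding
	 * up to PADLOCK_ALIGNMENT can skip at most
	 * PADLOCK_ALIGNMENT - STACK_ALIGN bytes, which is exactly the slack
	 * added to the declared size; the full 128 bytes stay usable.
	 */
	char buf[128 + PADLOCK_ALIGNMENT - STACK_ALIGN]
		__attribute__ ((aligned(STACK_ALIGN)));
	char *result = PTR_ALIGN(&buf[0], PADLOCK_ALIGNMENT);

	assert(((uintptr_t)result & (PADLOCK_ALIGNMENT - 1)) == 0);
	assert(result + 128 <= buf + sizeof(buf));
	printf("skipped %td byte(s) of at most %d\n",
	       result - buf, PADLOCK_ALIGNMENT - STACK_ALIGN);
	return 0;
}
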
16 #define PADLOCK_ALIGNMENT 16 macro
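
Note: this last hit is the definition itself; 16 matches the alignment the PadLock instructions expect. The crypto API expresses the same requirement as a mask of low address bits that must be clear, which is why the .cra_alignmask entries above are PADLOCK_ALIGNMENT - 1. A toy check of that convention (padlock_aligned() is an illustrative helper of mine, not a kernel function):

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

#define PADLOCK_ALIGNMENT 16

/*
 * With the "alignment - 1" mask convention, a pointer is suitably aligned
 * exactly when none of the masked low bits are set.
 */
static bool padlock_aligned(const void *p, unsigned int alignmask)
{
	return ((uintptr_t)p & alignmask) == 0;
}

int main(void)
{
	unsigned int alignmask = PADLOCK_ALIGNMENT - 1;  /* 0x0f */
	char blob[64] __attribute__ ((aligned(PADLOCK_ALIGNMENT)));

	for (int off = 0; off < 4; off++)
		printf("blob+%d: %saligned\n", off,
		       padlock_aligned(blob + off, alignmask) ? "" : "not ");
	return 0;
}
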