GHASH_BLOCK_SIZE   40 arch/arm/crypto/ghash-ce-glue.c 	u8 buf[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   86 arch/arm/crypto/ghash-ce-glue.c 				src += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE   89 arch/arm/crypto/ghash-ce-glue.c 			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  102 arch/arm/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  106 arch/arm/crypto/ghash-ce-glue.c 	if ((partial + len) >= GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  111 arch/arm/crypto/ghash-ce-glue.c 			int p = GHASH_BLOCK_SIZE - partial;
GHASH_BLOCK_SIZE  118 arch/arm/crypto/ghash-ce-glue.c 		blocks = len / GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  119 arch/arm/crypto/ghash-ce-glue.c 		len %= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  123 arch/arm/crypto/ghash-ce-glue.c 		src += blocks * GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  134 arch/arm/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  139 arch/arm/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
GHASH_BLOCK_SIZE  166 arch/arm/crypto/ghash-ce-glue.c 	if (keylen != GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  172 arch/arm/crypto/ghash-ce-glue.c 	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  199 arch/arm/crypto/ghash-ce-glue.c 	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE  349 arch/arm/crypto/ghash-ce-glue.c 		.cra_blocksize	= GHASH_BLOCK_SIZE,
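The arch/arm glue references above (the count % GHASH_BLOCK_SIZE remainder, the p = GHASH_BLOCK_SIZE - partial top-up, and the blocks/len split) are the usual partial-block bookkeeping for a 16-byte block hash. Below is a minimal standalone sketch of that bookkeeping; struct ghash_desc and process_blocks() are hypothetical stand-ins for the driver's context and its NEON block routine, not the kernel's names.

    #include <stddef.h>
    #include <string.h>

    #define GHASH_BLOCK_SIZE 16     /* GHASH works on 128-bit blocks */

    struct ghash_desc {
            unsigned long count;                    /* total bytes hashed so far */
            unsigned char buf[GHASH_BLOCK_SIZE];    /* partial-block staging area */
    };

    /* stand-in for the accelerated multi-block GHASH routine */
    static void process_blocks(struct ghash_desc *d, const unsigned char *src,
                               size_t blocks)
    {
            (void)d; (void)src; (void)blocks;       /* real code folds blocks into the digest */
    }

    static void ghash_update(struct ghash_desc *d, const unsigned char *src,
                             size_t len)
    {
            size_t partial = d->count % GHASH_BLOCK_SIZE;

            d->count += len;

            if (partial + len >= GHASH_BLOCK_SIZE) {
                    if (partial) {
                            /* top the staging buffer up to a full block first */
                            size_t p = GHASH_BLOCK_SIZE - partial;

                            memcpy(d->buf + partial, src, p);
                            src += p;
                            len -= p;
                            process_blocks(d, d->buf, 1);
                            partial = 0;
                    }

                    /* hash whole blocks straight from the source buffer */
                    size_t blocks = len / GHASH_BLOCK_SIZE;

                    process_blocks(d, src, blocks);
                    src += blocks * GHASH_BLOCK_SIZE;
                    len %= GHASH_BLOCK_SIZE;
            }

            /* stash the remainder for the next update or for final */
            if (len)
                    memcpy(d->buf + partial, src, len);
    }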
GHASH_BLOCK_SIZE   44 arch/arm64/crypto/ghash-ce-glue.c 	u8 buf[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE  103 arch/arm64/crypto/ghash-ce-glue.c 				src += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  106 arch/arm64/crypto/ghash-ce-glue.c 			crypto_xor((u8 *)&dst, in, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  116 arch/arm64/crypto/ghash-ce-glue.c #define MAX_BLOCKS	(SZ_64K / GHASH_BLOCK_SIZE)
GHASH_BLOCK_SIZE  126 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  130 arch/arm64/crypto/ghash-ce-glue.c 	if ((partial + len) >= GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  135 arch/arm64/crypto/ghash-ce-glue.c 			int p = GHASH_BLOCK_SIZE - partial;
GHASH_BLOCK_SIZE  142 arch/arm64/crypto/ghash-ce-glue.c 		blocks = len / GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  143 arch/arm64/crypto/ghash-ce-glue.c 		len %= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  153 arch/arm64/crypto/ghash-ce-glue.c 			src += chunk * GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  177 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  182 arch/arm64/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
GHASH_BLOCK_SIZE  197 arch/arm64/crypto/ghash-ce-glue.c 	unsigned int partial = ctx->count % GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  202 arch/arm64/crypto/ghash-ce-glue.c 		memset(ctx->buf + partial, 0, GHASH_BLOCK_SIZE - partial);
GHASH_BLOCK_SIZE  231 arch/arm64/crypto/ghash-ce-glue.c 	memcpy(&key->k, inkey, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  253 arch/arm64/crypto/ghash-ce-glue.c 	if (keylen != GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  265 arch/arm64/crypto/ghash-ce-glue.c 	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE  279 arch/arm64/crypto/ghash-ce-glue.c 	.base.cra_blocksize	= GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE  307 arch/arm64/crypto/ghash-ce-glue.c 	u8 key[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE  338 arch/arm64/crypto/ghash-ce-glue.c 		int buf_added = min(count, GHASH_BLOCK_SIZE - *buf_count);
GHASH_BLOCK_SIZE  347 arch/arm64/crypto/ghash-ce-glue.c 	if (count >= GHASH_BLOCK_SIZE || *buf_count == GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  348 arch/arm64/crypto/ghash-ce-glue.c 		int blocks = count / GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  354 arch/arm64/crypto/ghash-ce-glue.c 		src += blocks * GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  355 arch/arm64/crypto/ghash-ce-glue.c 		count %= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  369 arch/arm64/crypto/ghash-ce-glue.c 	u8 buf[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE  395 arch/arm64/crypto/ghash-ce-glue.c 		memset(&buf[buf_count], 0, GHASH_BLOCK_SIZE - buf_count);
GHASH_BLOCK_SIZE  504 arch/arm64/crypto/ghash-ce-glue.c 		u8 buf[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE  512 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes > GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  514 arch/arm64/crypto/ghash-ce-glue.c 			dst += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  515 arch/arm64/crypto/ghash-ce-glue.c 			nbytes %= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  519 arch/arm64/crypto/ghash-ce-glue.c 		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
GHASH_BLOCK_SIZE  546 arch/arm64/crypto/ghash-ce-glue.c 	u8 buf[2 * GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE  644 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes > GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  646 arch/arm64/crypto/ghash-ce-glue.c 			src += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  647 arch/arm64/crypto/ghash-ce-glue.c 			nbytes %= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  651 arch/arm64/crypto/ghash-ce-glue.c 		memset(buf + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
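In the arm64 file, MAX_BLOCKS at line 116 works out to SZ_64K / 16 = 4096 blocks, so bulk updates are walked in chunks of at most 64 KiB (presumably to bound how long kernel-mode NEON is held in one stretch). The GCM paths around lines 512-519 and 644-651 zero-pad a trailing partial block before hashing it, since GHASH only consumes whole 16-byte blocks. A sketch of that padding step, using a hypothetical pad_tail() helper:

    #include <string.h>

    #define GHASH_BLOCK_SIZE 16

    /*
     * Copy a trailing partial block (nbytes < 16) into a scratch block and
     * zero-fill the rest so it can be fed to a whole-block GHASH routine,
     * mirroring the memset(buf + nbytes, 0, ...) calls in the listing.
     */
    static void pad_tail(unsigned char block[GHASH_BLOCK_SIZE],
                         const unsigned char *tail, size_t nbytes)
    {
            memcpy(block, tail, nbytes);
            memset(block + nbytes, 0, GHASH_BLOCK_SIZE - nbytes);
    }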
GHASH_BLOCK_SIZE 1010 arch/s390/crypto/aes_s390.c 		u8 j0[GHASH_BLOCK_SIZE];/* initial counter value */
GHASH_BLOCK_SIZE 1105 arch/s390/crypto/aes_s390.c 	.ivsize			= GHASH_BLOCK_SIZE - sizeof(u32),
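The .ivsize arithmetic at line 1105 is 16 - sizeof(u32) = 12 bytes, i.e. the standard 96-bit GCM nonce; the remaining 32 bits of the J0 block are the counter word. A trivial compile-time check of that arithmetic:

    #include <stdint.h>

    /* 16-byte GHASH block minus the 32-bit counter word = 12-byte GCM IV */
    _Static_assert(16 - sizeof(uint32_t) == 12, "standard GCM IV is 96 bits");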
GHASH_BLOCK_SIZE   20 arch/s390/crypto/ghash_s390.c 	u8 key[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   24 arch/s390/crypto/ghash_s390.c 	u8 icv[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   25 arch/s390/crypto/ghash_s390.c 	u8 key[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   26 arch/s390/crypto/ghash_s390.c 	u8 buffer[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   36 arch/s390/crypto/ghash_s390.c 	memcpy(dctx->key, ctx->key, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   46 arch/s390/crypto/ghash_s390.c 	if (keylen != GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE   51 arch/s390/crypto/ghash_s390.c 	memcpy(ctx->key, key, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   64 arch/s390/crypto/ghash_s390.c 		u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE   75 arch/s390/crypto/ghash_s390.c 				   GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   79 arch/s390/crypto/ghash_s390.c 	n = srclen & ~(GHASH_BLOCK_SIZE - 1);
GHASH_BLOCK_SIZE   87 arch/s390/crypto/ghash_s390.c 		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
GHASH_BLOCK_SIZE   99 arch/s390/crypto/ghash_s390.c 		u8 *pos = buf + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE  102 arch/s390/crypto/ghash_s390.c 		cpacf_kimd(CPACF_KIMD_GHASH, dctx, buf, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  116 arch/s390/crypto/ghash_s390.c 		memcpy(dst, dctx->icv, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  131 arch/s390/crypto/ghash_s390.c 		.cra_blocksize		= GHASH_BLOCK_SIZE,
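The s390 update path rounds the source length down to a whole number of blocks with srclen & ~(GHASH_BLOCK_SIZE - 1) (line 79) before handing it to CPACF, and buffers the leftover (line 87). The mask trick only works because 16 is a power of two; a standalone sketch:

    #include <stddef.h>

    #define GHASH_BLOCK_SIZE 16

    /*
     * Round a length down to a multiple of the block size.  16 is 0x10,
     * so ~(16 - 1) = ~0xf clears the low four bits.
     */
    static size_t whole_blocks(size_t len)
    {
            return len & ~(size_t)(GHASH_BLOCK_SIZE - 1);
    }

For example, whole_blocks(70) is 64, leaving 6 bytes to be carried in the context buffer until the next update or the final call.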
GHASH_BLOCK_SIZE   40 arch/x86/crypto/ghash-clmulni-intel_glue.c 	u8 buffer[GHASH_BLOCK_SIZE];
GHASH_BLOCK_SIZE   60 arch/x86/crypto/ghash-clmulni-intel_glue.c 	if (keylen != GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE   88 arch/x86/crypto/ghash-clmulni-intel_glue.c 		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE  106 arch/x86/crypto/ghash-clmulni-intel_glue.c 		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
GHASH_BLOCK_SIZE  119 arch/x86/crypto/ghash-clmulni-intel_glue.c 		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE  139 arch/x86/crypto/ghash-clmulni-intel_glue.c 	memcpy(dst, buf, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  156 arch/x86/crypto/ghash-clmulni-intel_glue.c 		.cra_blocksize		= GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE  314 arch/x86/crypto/ghash-clmulni-intel_glue.c 			.cra_blocksize		= GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE   61 crypto/ghash-generic.c 	if (keylen != GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE   69 crypto/ghash-generic.c 	BUILD_BUG_ON(sizeof(k) != GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   70 crypto/ghash-generic.c 	memcpy(&k, key, GHASH_BLOCK_SIZE); /* avoid violating alignment rules */
GHASH_BLOCK_SIZE   72 crypto/ghash-generic.c 	memzero_explicit(&k, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   89 crypto/ghash-generic.c 		u8 *pos = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE  101 crypto/ghash-generic.c 	while (srclen >= GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  102 crypto/ghash-generic.c 		crypto_xor(dst, src, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  104 crypto/ghash-generic.c 		src += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  105 crypto/ghash-generic.c 		srclen -= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  109 crypto/ghash-generic.c 		dctx->bytes = GHASH_BLOCK_SIZE - srclen;
GHASH_BLOCK_SIZE  122 crypto/ghash-generic.c 		u8 *tmp = dst + (GHASH_BLOCK_SIZE - dctx->bytes);
GHASH_BLOCK_SIZE  140 crypto/ghash-generic.c 	memcpy(dst, buf, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  163 crypto/ghash-generic.c 		.cra_blocksize		= GHASH_BLOCK_SIZE,
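Lines 101-105 of the generic implementation show the core GHASH recurrence: XOR the next 16-byte block into the running digest, then multiply the result by the hash key H in GF(2^128) (the real code defers that multiply to the kernel's gf128mul helpers). A sketch of the loop shape with the field multiplication stubbed out:

    #include <stddef.h>

    #define GHASH_BLOCK_SIZE 16

    /* stand-in for multiplication by the hash key H in GF(2^128) */
    static void gf128_mul_by_h(unsigned char digest[GHASH_BLOCK_SIZE])
    {
            (void)digest;   /* real code: carry-less multiply + reduction */
    }

    /* digest = (((digest ^ B1) * H) ^ B2) * H ... for each full block Bi */
    static void ghash_blocks(unsigned char digest[GHASH_BLOCK_SIZE],
                             const unsigned char *src, size_t srclen)
    {
            while (srclen >= GHASH_BLOCK_SIZE) {
                    for (int i = 0; i < GHASH_BLOCK_SIZE; i++)
                            digest[i] ^= src[i];
                    gf128_mul_by_h(digest);
                    src += GHASH_BLOCK_SIZE;
                    srclen -= GHASH_BLOCK_SIZE;
            }
    }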
GHASH_BLOCK_SIZE 2133 drivers/crypto/inside-secure/safexcel_cipher.c 	ctx->state_sz = GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE   59 drivers/crypto/vmx/ghash.c 	if (keylen != GHASH_BLOCK_SIZE)
GHASH_BLOCK_SIZE   70 drivers/crypto/vmx/ghash.c 	memcpy(&ctx->key, key, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE   88 drivers/crypto/vmx/ghash.c 		crypto_xor((u8 *)dctx->shash, dctx->buffer, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  107 drivers/crypto/vmx/ghash.c 		while (srclen >= GHASH_BLOCK_SIZE) {
GHASH_BLOCK_SIZE  108 drivers/crypto/vmx/ghash.c 			crypto_xor((u8 *)dctx->shash, src, GHASH_BLOCK_SIZE);
GHASH_BLOCK_SIZE  110 drivers/crypto/vmx/ghash.c 			srclen -= GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  111 drivers/crypto/vmx/ghash.c 			src += GHASH_BLOCK_SIZE;
GHASH_BLOCK_SIZE  180 drivers/crypto/vmx/ghash.c 		 .cra_blocksize = GHASH_BLOCK_SIZE,
GHASH_BLOCK_SIZE   20 include/crypto/ghash.h 	u8 buffer[GHASH_BLOCK_SIZE];
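include/crypto/ghash.h is where the shared descriptor context lives; GHASH's block size (and digest size) is 16 bytes, one 128-bit field element. Piecing together the fields referenced across the listing (the buffer here, the shash words XORed in the vmx and arm glue code, the bytes counter in the s390/x86/generic drivers), the context looks roughly like the sketch below, written with stdint types rather than the kernel's u64/u8:

    #include <stdint.h>

    #define GHASH_BLOCK_SIZE        16
    #define GHASH_DIGEST_SIZE       16

    /* approximate shape of the per-request GHASH context */
    struct ghash_desc_ctx {
            uint64_t shash[2];                      /* running 128-bit digest */
            uint8_t  buffer[GHASH_BLOCK_SIZE];      /* partial-block staging buffer */
            int      bytes;                         /* bytes still needed to fill buffer */
    };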