rkp              2304 fs/xfs/libxfs/xfs_btree.c 	union xfs_btree_key	*rkp = NULL;	/* right btree key */
rkp              2368 fs/xfs/libxfs/xfs_btree.c 		rkp = xfs_btree_key_addr(cur, 1, right);
rkp              2377 fs/xfs/libxfs/xfs_btree.c 		xfs_btree_copy_keys(cur, lkp, rkp, 1);
rkp              2498 fs/xfs/libxfs/xfs_btree.c 	union xfs_btree_key	*rkp;		/* right btree key */
rkp              2555 fs/xfs/libxfs/xfs_btree.c 		rkp = xfs_btree_key_addr(cur, 1, right);
rkp              2564 fs/xfs/libxfs/xfs_btree.c 		xfs_btree_shift_keys(cur, rkp, 1, rrecs);
rkp              2572 fs/xfs/libxfs/xfs_btree.c 		xfs_btree_copy_keys(cur, rkp, lkp, 1);
rkp              2578 fs/xfs/libxfs/xfs_btree.c 		ASSERT(cur->bc_ops->keys_inorder(cur, rkp,
rkp              2732 fs/xfs/libxfs/xfs_btree.c 		union xfs_btree_key	*rkp;	/* right btree key */
rkp              2737 fs/xfs/libxfs/xfs_btree.c 		rkp = xfs_btree_key_addr(cur, 1, right);
rkp              2747 fs/xfs/libxfs/xfs_btree.c 		xfs_btree_copy_keys(cur, rkp, lkp, rrecs);
rkp              4050 fs/xfs/libxfs/xfs_btree.c 		union xfs_btree_key	*rkp;	/* right btree key */
rkp              4055 fs/xfs/libxfs/xfs_btree.c 		rkp = xfs_btree_key_addr(cur, 1, right);
rkp              4064 fs/xfs/libxfs/xfs_btree.c 		xfs_btree_copy_keys(cur, lkp, rkp, rrecs);
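The fs/xfs/libxfs/xfs_btree.c hits above are the key-copying steps of the btree rebalancing paths: rkp is the "right btree key" pointer used when one key moves between sibling blocks (left/right shift), when rrecs keys are copied into a freshly split right block, and when they are copied back into the left block on a merge during record deletion. A minimal sketch of the two helpers the listing calls on rkp, assuming a hypothetical fixed-size demo_key (the kernel sizes keys dynamically via cur->bc_ops->key_len):

#include <string.h>

struct demo_key {			/* stand-in for union xfs_btree_key */
	unsigned long long lo, hi;
};

/* Like xfs_btree_copy_keys(): copy numkeys keys from src to dst. */
static void demo_copy_keys(struct demo_key *dst, const struct demo_key *src,
			   int numkeys)
{
	memcpy(dst, src, numkeys * sizeof(*dst));
}

/*
 * Like xfs_btree_shift_keys(): slide numkeys keys at key by dir slots.
 * With dir = 1 this opens a hole at slot 1 of the right block, as in the
 * rshift path above, so the left sibling's last key can be copied in.
 */
static void demo_shift_keys(struct demo_key *key, int dir, int numkeys)
{
	memmove(key + dir, key, numkeys * sizeof(*key));
}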
rkp               260 lib/crypto/aes.c 	const u32 *rkp = ctx->key_enc + 4;
rkp               281 lib/crypto/aes.c 	for (round = 0;; round += 2, rkp += 8) {
rkp               282 lib/crypto/aes.c 		st1[0] = mix_columns(subshift(st0, 0)) ^ rkp[0];
rkp               283 lib/crypto/aes.c 		st1[1] = mix_columns(subshift(st0, 1)) ^ rkp[1];
rkp               284 lib/crypto/aes.c 		st1[2] = mix_columns(subshift(st0, 2)) ^ rkp[2];
rkp               285 lib/crypto/aes.c 		st1[3] = mix_columns(subshift(st0, 3)) ^ rkp[3];
rkp               290 lib/crypto/aes.c 		st0[0] = mix_columns(subshift(st1, 0)) ^ rkp[4];
rkp               291 lib/crypto/aes.c 		st0[1] = mix_columns(subshift(st1, 1)) ^ rkp[5];
rkp               292 lib/crypto/aes.c 		st0[2] = mix_columns(subshift(st1, 2)) ^ rkp[6];
rkp               293 lib/crypto/aes.c 		st0[3] = mix_columns(subshift(st1, 3)) ^ rkp[7];
rkp               296 lib/crypto/aes.c 	put_unaligned_le32(subshift(st1, 0) ^ rkp[4], out);
rkp               297 lib/crypto/aes.c 	put_unaligned_le32(subshift(st1, 1) ^ rkp[5], out + 4);
rkp               298 lib/crypto/aes.c 	put_unaligned_le32(subshift(st1, 2) ^ rkp[6], out + 8);
rkp               299 lib/crypto/aes.c 	put_unaligned_le32(subshift(st1, 3) ^ rkp[7], out + 12);
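In lib/crypto/aes.c, rkp walks the expanded encryption key schedule: it starts at ctx->key_enc + 4 because the first four words are consumed as the initial whitening key, and it advances eight 32-bit words per pass because the round loop is unrolled two rounds per iteration, with rkp[0..3] feeding the st0 -> st1 half and rkp[4..7] feeding the st1 -> st0 half. The loop-exit test between the two halves does not appear in the listing because that line never mentions rkp. A minimal sketch of just this indexing pattern, with demo_round() as a hypothetical stand-in for the real mix_columns(subshift(...)) column computation:

#include <stdint.h>

static uint32_t demo_round(const uint32_t st[4], int col)
{
	return st[col];		/* placeholder, not the real AES column math */
}

/* Two-rounds-per-iteration round-key walk, as in aes_encrypt() above. */
static void demo_round_key_walk(const uint32_t *key_enc, int rounds,
				uint32_t st0[4], uint32_t st1[4])
{
	const uint32_t *rkp = key_enc + 4;	/* skip the whitening key */
	int round, i;

	for (round = 0;; round += 2, rkp += 8) {
		for (i = 0; i < 4; i++)
			st1[i] = demo_round(st0, i) ^ rkp[i];

		if (round == rounds - 2)	/* st1 now holds the state  */
			break;			/* entering the final round */

		for (i = 0; i < 4; i++)
			st0[i] = demo_round(st1, i) ^ rkp[4 + i];
	}

	/* the caller applies the final round key, rkp[4..7], without MixColumns */
}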
rkp               311 lib/crypto/aes.c 	const u32 *rkp = ctx->key_dec + 4;
rkp               332 lib/crypto/aes.c 	for (round = 0;; round += 2, rkp += 8) {
rkp               333 lib/crypto/aes.c 		st1[0] = inv_mix_columns(inv_subshift(st0, 0)) ^ rkp[0];
rkp               334 lib/crypto/aes.c 		st1[1] = inv_mix_columns(inv_subshift(st0, 1)) ^ rkp[1];
rkp               335 lib/crypto/aes.c 		st1[2] = inv_mix_columns(inv_subshift(st0, 2)) ^ rkp[2];
rkp               336 lib/crypto/aes.c 		st1[3] = inv_mix_columns(inv_subshift(st0, 3)) ^ rkp[3];
rkp               341 lib/crypto/aes.c 		st0[0] = inv_mix_columns(inv_subshift(st1, 0)) ^ rkp[4];
rkp               342 lib/crypto/aes.c 		st0[1] = inv_mix_columns(inv_subshift(st1, 1)) ^ rkp[5];
rkp               343 lib/crypto/aes.c 		st0[2] = inv_mix_columns(inv_subshift(st1, 2)) ^ rkp[6];
rkp               344 lib/crypto/aes.c 		st0[3] = inv_mix_columns(inv_subshift(st1, 3)) ^ rkp[7];
rkp               347 lib/crypto/aes.c 	put_unaligned_le32(inv_subshift(st1, 0) ^ rkp[4], out);
rkp               348 lib/crypto/aes.c 	put_unaligned_le32(inv_subshift(st1, 1) ^ rkp[5], out + 4);
rkp               349 lib/crypto/aes.c 	put_unaligned_le32(inv_subshift(st1, 2) ^ rkp[6], out + 8);
rkp               350 lib/crypto/aes.c 	put_unaligned_le32(inv_subshift(st1, 3) ^ rkp[7], out + 12);
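aes_decrypt() mirrors the same walk, starting from ctx->key_dec + 4 and substituting the inverse transforms. As a worked check for AES-128 (assuming the standard 44-word key schedule and the usual 10 rounds for a 16-byte key): rkp sits at words 4, 12, 20, 28 and 36 across five loop passes, the loop breaks after consuming words 36..39, and the four trailing put_unaligned_le32() calls then read rkp[4..7], i.e. words 40..43, the last scheduled round key, applied without the (inverse) MixColumns step.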