walk              181 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              185 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              187 arch/arm/crypto/aes-ce-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              189 arch/arm/crypto/aes-ce-glue.c 		ce_aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              192 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              201 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              205 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              207 arch/arm/crypto/aes-ce-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              209 arch/arm/crypto/aes-ce-glue.c 		ce_aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              212 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
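
The ECB hits above all follow the same skcipher_walk loop. Below is a minimal sketch of that pattern, not taken from these files; my_ecb_encrypt() and my_cipher_ecb() are hypothetical stand-ins for the per-arch glue function and asm routine. (The real glue also brackets the asm call with kernel_neon_begin()/kernel_neon_end(); those lines do not mention walk, which is why they are absent from the listing.)

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

void my_cipher_ecb(u8 *dst, const u8 *src, int blocks);	/* hypothetical asm routine */

static int my_ecb_encrypt(struct skcipher_request *req)
{
	struct skcipher_walk walk;
	unsigned int blocks;
	int err;

	/* Map the request's scatterlists for kernel-virtual access. */
	err = skcipher_walk_virt(&walk, req, false);

	while ((blocks = walk.nbytes / AES_BLOCK_SIZE)) {
		/* Process the whole blocks of this chunk... */
		my_cipher_ecb(walk.dst.virt.addr, walk.src.virt.addr, blocks);
		/* ...then hand back the unprocessed remainder; the walk
		 * advances to the next chunk or finishes. */
		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
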
walk              218 arch/arm/crypto/aes-ce-glue.c 			    struct skcipher_walk *walk)
walk              225 arch/arm/crypto/aes-ce-glue.c 	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
walk              227 arch/arm/crypto/aes-ce-glue.c 		ce_aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
walk              229 arch/arm/crypto/aes-ce-glue.c 				   walk->iv);
walk              231 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
walk              238 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              241 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              244 arch/arm/crypto/aes-ce-glue.c 	return cbc_encrypt_walk(req, &walk);
walk              248 arch/arm/crypto/aes-ce-glue.c 			    struct skcipher_walk *walk)
walk              255 arch/arm/crypto/aes-ce-glue.c 	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
walk              257 arch/arm/crypto/aes-ce-glue.c 		ce_aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
walk              259 arch/arm/crypto/aes-ce-glue.c 				   walk->iv);
walk              261 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
walk              268 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              271 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              274 arch/arm/crypto/aes-ce-glue.c 	return cbc_decrypt_walk(req, &walk);
walk              285 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              303 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_virt(&walk, &subreq, false) ?:
walk              304 arch/arm/crypto/aes-ce-glue.c 		      cbc_encrypt_walk(&subreq, &walk);
walk              322 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              327 arch/arm/crypto/aes-ce-glue.c 	ce_aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              328 arch/arm/crypto/aes-ce-glue.c 			       ctx->key_enc, num_rounds(ctx), walk.nbytes,
walk              329 arch/arm/crypto/aes-ce-glue.c 			       walk.iv);
walk              332 arch/arm/crypto/aes-ce-glue.c 	return skcipher_walk_done(&walk, 0);
walk              343 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              361 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_virt(&walk, &subreq, false) ?:
walk              362 arch/arm/crypto/aes-ce-glue.c 		      cbc_decrypt_walk(&subreq, &walk);
walk              380 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              385 arch/arm/crypto/aes-ce-glue.c 	ce_aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              386 arch/arm/crypto/aes-ce-glue.c 			       ctx->key_dec, num_rounds(ctx), walk.nbytes,
walk              387 arch/arm/crypto/aes-ce-glue.c 			       walk.iv);
walk              390 arch/arm/crypto/aes-ce-glue.c 	return skcipher_walk_done(&walk, 0);
walk              397 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              400 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              402 arch/arm/crypto/aes-ce-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              404 arch/arm/crypto/aes-ce-glue.c 		ce_aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              406 arch/arm/crypto/aes-ce-glue.c 				   walk.iv);
walk              408 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              410 arch/arm/crypto/aes-ce-glue.c 	if (walk.nbytes) {
walk              412 arch/arm/crypto/aes-ce-glue.c 		unsigned int nbytes = walk.nbytes;
walk              413 arch/arm/crypto/aes-ce-glue.c 		u8 *tdst = walk.dst.virt.addr;
walk              414 arch/arm/crypto/aes-ce-glue.c 		u8 *tsrc = walk.src.virt.addr;
walk              423 arch/arm/crypto/aes-ce-glue.c 				   blocks, walk.iv);
walk              426 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, 0);
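
The CTR hits end with a partial-block path: after the whole-block loop, walk.nbytes may still hold a few trailing bytes. The per-arch code above lets its asm routine produce the final keystream block; the sketch below shows the same tail handling using the generic AES library helpers instead, as an assumption-labelled alternative (my_ctr_tail() is hypothetical).

#include <crypto/aes.h>
#include <crypto/algapi.h>		/* crypto_inc(), crypto_xor_cpy() */
#include <crypto/internal/skcipher.h>

/* Assumes the whole-block loop already left walk->nbytes < AES_BLOCK_SIZE. */
static int my_ctr_tail(struct skcipher_walk *walk, const struct crypto_aes_ctx *ctx)
{
	u8 keystream[AES_BLOCK_SIZE];

	if (!walk->nbytes)
		return 0;

	/* Generate one keystream block from the counter, then bump it. */
	aes_encrypt(ctx, keystream, walk->iv);
	crypto_inc(walk->iv, AES_BLOCK_SIZE);

	/* XOR only the bytes that are actually present. */
	crypto_xor_cpy(walk->dst.virt.addr, walk->src.virt.addr, keystream,
		       walk->nbytes);

	/* 0 leftover bytes: everything in this chunk was consumed. */
	return skcipher_walk_done(walk, 0);
}
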
walk              463 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              468 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              470 arch/arm/crypto/aes-ce-glue.c 	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
walk              474 arch/arm/crypto/aes-ce-glue.c 		skcipher_walk_abort(&walk);
walk              484 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_virt(&walk, req, false);
walk              489 arch/arm/crypto/aes-ce-glue.c 	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
walk              490 arch/arm/crypto/aes-ce-glue.c 		int nbytes = walk.nbytes;
walk              492 arch/arm/crypto/aes-ce-glue.c 		if (walk.nbytes < walk.total)
walk              496 arch/arm/crypto/aes-ce-glue.c 		ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              497 arch/arm/crypto/aes-ce-glue.c 				   ctx->key1.key_enc, rounds, nbytes, walk.iv,
walk              500 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk              513 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              518 arch/arm/crypto/aes-ce-glue.c 	ce_aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              519 arch/arm/crypto/aes-ce-glue.c 			   ctx->key1.key_enc, rounds, walk.nbytes, walk.iv,
walk              523 arch/arm/crypto/aes-ce-glue.c 	return skcipher_walk_done(&walk, 0);
walk              535 arch/arm/crypto/aes-ce-glue.c 	struct skcipher_walk walk;
walk              540 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              542 arch/arm/crypto/aes-ce-glue.c 	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
walk              546 arch/arm/crypto/aes-ce-glue.c 		skcipher_walk_abort(&walk);
walk              556 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_virt(&walk, req, false);
walk              561 arch/arm/crypto/aes-ce-glue.c 	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
walk              562 arch/arm/crypto/aes-ce-glue.c 		int nbytes = walk.nbytes;
walk              564 arch/arm/crypto/aes-ce-glue.c 		if (walk.nbytes < walk.total)
walk              568 arch/arm/crypto/aes-ce-glue.c 		ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              569 arch/arm/crypto/aes-ce-glue.c 				   ctx->key1.key_dec, rounds, nbytes, walk.iv,
walk              572 arch/arm/crypto/aes-ce-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk              585 arch/arm/crypto/aes-ce-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              590 arch/arm/crypto/aes-ce-glue.c 	ce_aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              591 arch/arm/crypto/aes-ce-glue.c 			   ctx->key1.key_dec, rounds, walk.nbytes, walk.iv,
walk              595 arch/arm/crypto/aes-ce-glue.c 	return skcipher_walk_done(&walk, 0);
walk               92 arch/arm/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk               95 arch/arm/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               97 arch/arm/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk               98 arch/arm/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              100 arch/arm/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total)
walk              102 arch/arm/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              105 arch/arm/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
walk              108 arch/arm/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              109 arch/arm/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              161 arch/arm/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              164 arch/arm/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              166 arch/arm/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              167 arch/arm/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              169 arch/arm/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total)
walk              171 arch/arm/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              174 arch/arm/crypto/aes-neonbs-glue.c 		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              176 arch/arm/crypto/aes-neonbs-glue.c 				  walk.iv);
walk              178 arch/arm/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              179 arch/arm/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              224 arch/arm/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              228 arch/arm/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              230 arch/arm/crypto/aes-neonbs-glue.c 	while (walk.nbytes > 0) {
walk              231 arch/arm/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              232 arch/arm/crypto/aes-neonbs-glue.c 		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;
walk              234 arch/arm/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total) {
walk              236 arch/arm/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              241 arch/arm/crypto/aes-neonbs-glue.c 		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              242 arch/arm/crypto/aes-neonbs-glue.c 				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
walk              246 arch/arm/crypto/aes-neonbs-glue.c 			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
walk              247 arch/arm/crypto/aes-neonbs-glue.c 			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
walk              250 arch/arm/crypto/aes-neonbs-glue.c 				       walk.total % AES_BLOCK_SIZE);
walk              252 arch/arm/crypto/aes-neonbs-glue.c 			err = skcipher_walk_done(&walk, 0);
walk              255 arch/arm/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              256 arch/arm/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              338 arch/arm/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              354 arch/arm/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              358 arch/arm/crypto/aes-neonbs-glue.c 	crypto_cipher_encrypt_one(ctx->tweak_tfm, walk.iv, walk.iv);
walk              360 arch/arm/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              361 arch/arm/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              364 arch/arm/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total) {
walk              366 arch/arm/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              371 arch/arm/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->key.rk,
walk              372 arch/arm/crypto/aes-neonbs-glue.c 		   ctx->key.rounds, blocks, walk.iv, reorder_last_tweak);
walk              374 arch/arm/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              375 arch/arm/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk               68 arch/arm/crypto/chacha-neon-glue.c 	struct skcipher_walk walk;
walk               72 arch/arm/crypto/chacha-neon-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               76 arch/arm/crypto/chacha-neon-glue.c 	while (walk.nbytes > 0) {
walk               77 arch/arm/crypto/chacha-neon-glue.c 		unsigned int nbytes = walk.nbytes;
walk               79 arch/arm/crypto/chacha-neon-glue.c 		if (nbytes < walk.total)
walk               80 arch/arm/crypto/chacha-neon-glue.c 			nbytes = round_down(nbytes, walk.stride);
walk               83 arch/arm/crypto/chacha-neon-glue.c 		chacha_doneon(state, walk.dst.virt.addr, walk.src.virt.addr,
walk               86 arch/arm/crypto/chacha-neon-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
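
For stream ciphers the walk is bounded by walk.stride rather than the block size: every chunk except the final one is trimmed down to a multiple of the stride. A minimal sketch of that loop follows; my_stream_xor() is a hypothetical stand-in for chacha_doneon(), and rounddown() is used because the stride need not be a power of two.

#include <linux/kernel.h>		/* rounddown() */
#include <crypto/internal/skcipher.h>

void my_stream_xor(u32 *state, u8 *dst, const u8 *src, unsigned int bytes);	/* hypothetical */

static int my_stream_crypt(struct skcipher_request *req, u32 *state)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		/* Only the final chunk (nbytes == walk.total) may be a
		 * partial stride; trim every other chunk. */
		if (nbytes < walk.total)
			nbytes = rounddown(nbytes, walk.stride);

		my_stream_xor(state, walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes);

		/* Report the trimmed-off remainder as unprocessed. */
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}
	return err;
}
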
walk              145 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct scatter_walk walk;
walk              160 arch/arm64/crypto/aes-ce-ccm-glue.c 	scatterwalk_start(&walk, req->src);
walk              163 arch/arm64/crypto/aes-ce-ccm-glue.c 		u32 n = scatterwalk_clamp(&walk, len);
walk              167 arch/arm64/crypto/aes-ce-ccm-glue.c 			scatterwalk_start(&walk, sg_next(walk.sg));
walk              168 arch/arm64/crypto/aes-ce-ccm-glue.c 			n = scatterwalk_clamp(&walk, len);
walk              170 arch/arm64/crypto/aes-ce-ccm-glue.c 		p = scatterwalk_map(&walk);
walk              175 arch/arm64/crypto/aes-ce-ccm-glue.c 		scatterwalk_advance(&walk, n);
walk              176 arch/arm64/crypto/aes-ce-ccm-glue.c 		scatterwalk_done(&walk, 0, len);
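
The aes-ce-ccm hits above drive the lower-level scatter_walk directly to feed the associated data into the MAC. A sketch of that traversal, with my_mac_update() as a hypothetical stand-in for the CCM/GHASH update:

#include <linux/scatterlist.h>
#include <crypto/scatterwalk.h>

void my_mac_update(u8 mac[], const u8 *data, u32 len);	/* hypothetical */

static void my_walk_aad(struct scatterlist *sg, u32 len, u8 mac[])
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);

	while (len) {
		/* How much of the current sg entry is usable. */
		u32 n = scatterwalk_clamp(&walk, len);
		u8 *p;

		if (!n) {
			/* Entry exhausted: restart on the next one. */
			scatterwalk_start(&walk, sg_next(walk.sg));
			n = scatterwalk_clamp(&walk, len);
		}

		p = scatterwalk_map(&walk);
		my_mac_update(mac, p, n);
		len -= n;

		scatterwalk_unmap(p);
		scatterwalk_advance(&walk, n);
		scatterwalk_done(&walk, 0, len);	/* 0 = read only; len != 0 means more to come */
	}
}
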
walk              180 arch/arm64/crypto/aes-ce-ccm-glue.c static int ccm_crypt_fallback(struct skcipher_walk *walk, u8 mac[], u8 iv0[],
walk              186 arch/arm64/crypto/aes-ce-ccm-glue.c 	while (walk->nbytes) {
walk              187 arch/arm64/crypto/aes-ce-ccm-glue.c 		int blocks = walk->nbytes / AES_BLOCK_SIZE;
walk              188 arch/arm64/crypto/aes-ce-ccm-glue.c 		u32 tail = walk->nbytes % AES_BLOCK_SIZE;
walk              189 arch/arm64/crypto/aes-ce-ccm-glue.c 		u8 *dst = walk->dst.virt.addr;
walk              190 arch/arm64/crypto/aes-ce-ccm-glue.c 		u8 *src = walk->src.virt.addr;
walk              191 arch/arm64/crypto/aes-ce-ccm-glue.c 		u32 nbytes = walk->nbytes;
walk              193 arch/arm64/crypto/aes-ce-ccm-glue.c 		if (nbytes == walk->total && tail > 0) {
walk              204 arch/arm64/crypto/aes-ce-ccm-glue.c 			crypto_inc(walk->iv, AES_BLOCK_SIZE);
walk              205 arch/arm64/crypto/aes-ce-ccm-glue.c 			aes_encrypt(ctx, buf, walk->iv);
walk              217 arch/arm64/crypto/aes-ce-ccm-glue.c 		err = skcipher_walk_done(walk, tail);
walk              232 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct skcipher_walk walk;
walk              248 arch/arm64/crypto/aes-ce-ccm-glue.c 	err = skcipher_walk_aead_encrypt(&walk, req, false);
walk              251 arch/arm64/crypto/aes-ce-ccm-glue.c 		while (walk.nbytes) {
walk              252 arch/arm64/crypto/aes-ce-ccm-glue.c 			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
walk              254 arch/arm64/crypto/aes-ce-ccm-glue.c 			if (walk.nbytes == walk.total)
walk              258 arch/arm64/crypto/aes-ce-ccm-glue.c 			ce_aes_ccm_encrypt(walk.dst.virt.addr,
walk              259 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.src.virt.addr,
walk              260 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.nbytes - tail, ctx->key_enc,
walk              261 arch/arm64/crypto/aes-ce-ccm-glue.c 					   num_rounds(ctx), mac, walk.iv);
walk              264 arch/arm64/crypto/aes-ce-ccm-glue.c 			err = skcipher_walk_done(&walk, tail);
walk              273 arch/arm64/crypto/aes-ce-ccm-glue.c 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, true);
walk              290 arch/arm64/crypto/aes-ce-ccm-glue.c 	struct skcipher_walk walk;
walk              306 arch/arm64/crypto/aes-ce-ccm-glue.c 	err = skcipher_walk_aead_decrypt(&walk, req, false);
walk              309 arch/arm64/crypto/aes-ce-ccm-glue.c 		while (walk.nbytes) {
walk              310 arch/arm64/crypto/aes-ce-ccm-glue.c 			u32 tail = walk.nbytes % AES_BLOCK_SIZE;
walk              312 arch/arm64/crypto/aes-ce-ccm-glue.c 			if (walk.nbytes == walk.total)
walk              316 arch/arm64/crypto/aes-ce-ccm-glue.c 			ce_aes_ccm_decrypt(walk.dst.virt.addr,
walk              317 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.src.virt.addr,
walk              318 arch/arm64/crypto/aes-ce-ccm-glue.c 					   walk.nbytes - tail, ctx->key_enc,
walk              319 arch/arm64/crypto/aes-ce-ccm-glue.c 					   num_rounds(ctx), mac, walk.iv);
walk              322 arch/arm64/crypto/aes-ce-ccm-glue.c 			err = skcipher_walk_done(&walk, tail);
walk              331 arch/arm64/crypto/aes-ce-ccm-glue.c 		err = ccm_crypt_fallback(&walk, mac, buf, ctx, false);
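
The CCM encrypt/decrypt paths use the AEAD flavour of the walk, which covers only the cryptlen bytes of payload (the associated data was consumed by the scatter_walk above). A reduced sketch, with my_ccm_blocks() standing in for ce_aes_ccm_encrypt():

#include <crypto/aes.h>
#include <crypto/aead.h>
#include <crypto/internal/skcipher.h>

void my_ccm_blocks(u8 *dst, const u8 *src, u32 len,
		   const struct crypto_aes_ctx *ctx, u8 mac[], u8 ctr[]);	/* hypothetical */

static int my_ccm_payload(struct aead_request *req,
			  const struct crypto_aes_ctx *ctx,
			  u8 mac[AES_BLOCK_SIZE])
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	while (walk.nbytes) {
		u32 tail = walk.nbytes % AES_BLOCK_SIZE;

		/* The final chunk is handed over in full, partial block
		 * included; earlier chunks keep their tail for later. */
		if (walk.nbytes == walk.total)
			tail = 0;

		my_ccm_blocks(walk.dst.virt.addr, walk.src.virt.addr,
			      walk.nbytes - tail, ctx, mac, walk.iv);

		err = skcipher_walk_done(&walk, tail);
	}
	return err;
}
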
walk              196 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              199 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              201 arch/arm64/crypto/aes-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              203 arch/arm64/crypto/aes-glue.c 		aes_ecb_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              206 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              216 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              219 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              221 arch/arm64/crypto/aes-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              223 arch/arm64/crypto/aes-glue.c 		aes_ecb_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              226 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              232 arch/arm64/crypto/aes-glue.c 			    struct skcipher_walk *walk)
walk              239 arch/arm64/crypto/aes-glue.c 	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
walk              241 arch/arm64/crypto/aes-glue.c 		aes_cbc_encrypt(walk->dst.virt.addr, walk->src.virt.addr,
walk              242 arch/arm64/crypto/aes-glue.c 				ctx->key_enc, rounds, blocks, walk->iv);
walk              244 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
walk              251 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              254 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              257 arch/arm64/crypto/aes-glue.c 	return cbc_encrypt_walk(req, &walk);
walk              261 arch/arm64/crypto/aes-glue.c 			    struct skcipher_walk *walk)
walk              268 arch/arm64/crypto/aes-glue.c 	while ((blocks = (walk->nbytes / AES_BLOCK_SIZE))) {
walk              270 arch/arm64/crypto/aes-glue.c 		aes_cbc_decrypt(walk->dst.virt.addr, walk->src.virt.addr,
walk              271 arch/arm64/crypto/aes-glue.c 				ctx->key_dec, rounds, blocks, walk->iv);
walk              273 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(walk, walk->nbytes % AES_BLOCK_SIZE);
walk              280 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              283 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              286 arch/arm64/crypto/aes-glue.c 	return cbc_decrypt_walk(req, &walk);
walk              298 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              315 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_virt(&walk, &subreq, false) ?:
walk              316 arch/arm64/crypto/aes-glue.c 		      cbc_encrypt_walk(&subreq, &walk);
walk              334 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              339 arch/arm64/crypto/aes-glue.c 	aes_cbc_cts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              340 arch/arm64/crypto/aes-glue.c 			    ctx->key_enc, rounds, walk.nbytes, walk.iv);
walk              343 arch/arm64/crypto/aes-glue.c 	return skcipher_walk_done(&walk, 0);
walk              355 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              372 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_virt(&walk, &subreq, false) ?:
walk              373 arch/arm64/crypto/aes-glue.c 		      cbc_decrypt_walk(&subreq, &walk);
walk              391 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              396 arch/arm64/crypto/aes-glue.c 	aes_cbc_cts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              397 arch/arm64/crypto/aes-glue.c 			    ctx->key_dec, rounds, walk.nbytes, walk.iv);
walk              400 arch/arm64/crypto/aes-glue.c 	return skcipher_walk_done(&walk, 0);
walk              424 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              427 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              429 arch/arm64/crypto/aes-glue.c 	blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              432 arch/arm64/crypto/aes-glue.c 		aes_essiv_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              436 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              438 arch/arm64/crypto/aes-glue.c 	return err ?: cbc_encrypt_walk(req, &walk);
walk              446 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              449 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              451 arch/arm64/crypto/aes-glue.c 	blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              454 arch/arm64/crypto/aes-glue.c 		aes_essiv_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              458 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              460 arch/arm64/crypto/aes-glue.c 	return err ?: cbc_decrypt_walk(req, &walk);
walk              468 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              471 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              473 arch/arm64/crypto/aes-glue.c 	while ((blocks = (walk.nbytes / AES_BLOCK_SIZE))) {
walk              475 arch/arm64/crypto/aes-glue.c 		aes_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              476 arch/arm64/crypto/aes-glue.c 				ctx->key_enc, rounds, blocks, walk.iv);
walk              478 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              480 arch/arm64/crypto/aes-glue.c 	if (walk.nbytes) {
walk              482 arch/arm64/crypto/aes-glue.c 		unsigned int nbytes = walk.nbytes;
walk              483 arch/arm64/crypto/aes-glue.c 		u8 *tdst = walk.dst.virt.addr;
walk              484 arch/arm64/crypto/aes-glue.c 		u8 *tsrc = walk.src.virt.addr;
walk              493 arch/arm64/crypto/aes-glue.c 				blocks, walk.iv);
walk              496 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, 0);
walk              534 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              539 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              541 arch/arm64/crypto/aes-glue.c 	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
walk              545 arch/arm64/crypto/aes-glue.c 		skcipher_walk_abort(&walk);
walk              555 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_virt(&walk, req, false);
walk              560 arch/arm64/crypto/aes-glue.c 	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
walk              561 arch/arm64/crypto/aes-glue.c 		int nbytes = walk.nbytes;
walk              563 arch/arm64/crypto/aes-glue.c 		if (walk.nbytes < walk.total)
walk              567 arch/arm64/crypto/aes-glue.c 		aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              569 arch/arm64/crypto/aes-glue.c 				ctx->key2.key_enc, walk.iv, first);
walk              571 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk              584 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              589 arch/arm64/crypto/aes-glue.c 	aes_xts_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              590 arch/arm64/crypto/aes-glue.c 			ctx->key1.key_enc, rounds, walk.nbytes,
walk              591 arch/arm64/crypto/aes-glue.c 			ctx->key2.key_enc, walk.iv, first);
walk              594 arch/arm64/crypto/aes-glue.c 	return skcipher_walk_done(&walk, 0);
walk              606 arch/arm64/crypto/aes-glue.c 	struct skcipher_walk walk;
walk              611 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              613 arch/arm64/crypto/aes-glue.c 	if (unlikely(tail > 0 && walk.nbytes < walk.total)) {
walk              617 arch/arm64/crypto/aes-glue.c 		skcipher_walk_abort(&walk);
walk              627 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_virt(&walk, req, false);
walk              632 arch/arm64/crypto/aes-glue.c 	for (first = 1; walk.nbytes >= AES_BLOCK_SIZE; first = 0) {
walk              633 arch/arm64/crypto/aes-glue.c 		int nbytes = walk.nbytes;
walk              635 arch/arm64/crypto/aes-glue.c 		if (walk.nbytes < walk.total)
walk              639 arch/arm64/crypto/aes-glue.c 		aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              641 arch/arm64/crypto/aes-glue.c 				ctx->key2.key_enc, walk.iv, first);
walk              643 arch/arm64/crypto/aes-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk              656 arch/arm64/crypto/aes-glue.c 	err = skcipher_walk_virt(&walk, &subreq, false);
walk              662 arch/arm64/crypto/aes-glue.c 	aes_xts_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              663 arch/arm64/crypto/aes-glue.c 			ctx->key1.key_dec, rounds, walk.nbytes,
walk              664 arch/arm64/crypto/aes-glue.c 			ctx->key2.key_enc, walk.iv, first);
walk              667 arch/arm64/crypto/aes-glue.c 	return skcipher_walk_done(&walk, 0);
walk              103 arch/arm64/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              106 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              108 arch/arm64/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              109 arch/arm64/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              111 arch/arm64/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total)
walk              113 arch/arm64/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              116 arch/arm64/crypto/aes-neonbs-glue.c 		fn(walk.dst.virt.addr, walk.src.virt.addr, ctx->rk,
walk              119 arch/arm64/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              120 arch/arm64/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              162 arch/arm64/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              165 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              167 arch/arm64/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              168 arch/arm64/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              172 arch/arm64/crypto/aes-neonbs-glue.c 		neon_aes_cbc_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              174 arch/arm64/crypto/aes-neonbs-glue.c 				     walk.iv);
walk              176 arch/arm64/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes % AES_BLOCK_SIZE);
walk              185 arch/arm64/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              188 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              190 arch/arm64/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              191 arch/arm64/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              193 arch/arm64/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total)
walk              195 arch/arm64/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              198 arch/arm64/crypto/aes-neonbs-glue.c 		aesbs_cbc_decrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              200 arch/arm64/crypto/aes-neonbs-glue.c 				  walk.iv);
walk              202 arch/arm64/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              203 arch/arm64/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              232 arch/arm64/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              236 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              238 arch/arm64/crypto/aes-neonbs-glue.c 	while (walk.nbytes > 0) {
walk              239 arch/arm64/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              240 arch/arm64/crypto/aes-neonbs-glue.c 		u8 *final = (walk.total % AES_BLOCK_SIZE) ? buf : NULL;
walk              242 arch/arm64/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total) {
walk              244 arch/arm64/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              249 arch/arm64/crypto/aes-neonbs-glue.c 		aesbs_ctr_encrypt(walk.dst.virt.addr, walk.src.virt.addr,
walk              250 arch/arm64/crypto/aes-neonbs-glue.c 				  ctx->rk, ctx->rounds, blocks, walk.iv, final);
walk              254 arch/arm64/crypto/aes-neonbs-glue.c 			u8 *dst = walk.dst.virt.addr + blocks * AES_BLOCK_SIZE;
walk              255 arch/arm64/crypto/aes-neonbs-glue.c 			u8 *src = walk.src.virt.addr + blocks * AES_BLOCK_SIZE;
walk              258 arch/arm64/crypto/aes-neonbs-glue.c 				       walk.total % AES_BLOCK_SIZE);
walk              260 arch/arm64/crypto/aes-neonbs-glue.c 			err = skcipher_walk_done(&walk, 0);
walk              263 arch/arm64/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk,
walk              264 arch/arm64/crypto/aes-neonbs-glue.c 					 walk.nbytes - blocks * AES_BLOCK_SIZE);
walk              327 arch/arm64/crypto/aes-neonbs-glue.c 	struct skcipher_walk walk;
walk              352 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              356 arch/arm64/crypto/aes-neonbs-glue.c 	while (walk.nbytes >= AES_BLOCK_SIZE) {
walk              357 arch/arm64/crypto/aes-neonbs-glue.c 		unsigned int blocks = walk.nbytes / AES_BLOCK_SIZE;
walk              359 arch/arm64/crypto/aes-neonbs-glue.c 		if (walk.nbytes < walk.total || walk.nbytes % AES_BLOCK_SIZE)
walk              361 arch/arm64/crypto/aes-neonbs-glue.c 					    walk.stride / AES_BLOCK_SIZE);
walk              363 arch/arm64/crypto/aes-neonbs-glue.c 		out = walk.dst.virt.addr;
walk              364 arch/arm64/crypto/aes-neonbs-glue.c 		in = walk.src.virt.addr;
walk              365 arch/arm64/crypto/aes-neonbs-glue.c 		nbytes = walk.nbytes;
walk              370 arch/arm64/crypto/aes-neonbs-glue.c 				neon_aes_ecb_encrypt(walk.iv, walk.iv,
walk              376 arch/arm64/crypto/aes-neonbs-glue.c 			   walk.iv);
walk              383 arch/arm64/crypto/aes-neonbs-glue.c 		if (walk.nbytes == walk.total && nbytes > 0)
walk              387 arch/arm64/crypto/aes-neonbs-glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              401 arch/arm64/crypto/aes-neonbs-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              405 arch/arm64/crypto/aes-neonbs-glue.c 	out = walk.dst.virt.addr;
walk              406 arch/arm64/crypto/aes-neonbs-glue.c 	in = walk.src.virt.addr;
walk              407 arch/arm64/crypto/aes-neonbs-glue.c 	nbytes = walk.nbytes;
walk              413 arch/arm64/crypto/aes-neonbs-glue.c 				     nbytes, ctx->twkey, walk.iv, first ?: 2);
walk              416 arch/arm64/crypto/aes-neonbs-glue.c 				     nbytes, ctx->twkey, walk.iv, first ?: 2);
walk              419 arch/arm64/crypto/aes-neonbs-glue.c 	return skcipher_walk_done(&walk, 0);
walk               65 arch/arm64/crypto/chacha-neon-glue.c 	struct skcipher_walk walk;
walk               69 arch/arm64/crypto/chacha-neon-glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               73 arch/arm64/crypto/chacha-neon-glue.c 	while (walk.nbytes > 0) {
walk               74 arch/arm64/crypto/chacha-neon-glue.c 		unsigned int nbytes = walk.nbytes;
walk               76 arch/arm64/crypto/chacha-neon-glue.c 		if (nbytes < walk.total)
walk               77 arch/arm64/crypto/chacha-neon-glue.c 			nbytes = rounddown(nbytes, walk.stride);
walk               80 arch/arm64/crypto/chacha-neon-glue.c 		chacha_doneon(state, walk.dst.virt.addr, walk.src.virt.addr,
walk               83 arch/arm64/crypto/chacha-neon-glue.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk              370 arch/arm64/crypto/ghash-ce-glue.c 	struct scatter_walk walk;
walk              374 arch/arm64/crypto/ghash-ce-glue.c 	scatterwalk_start(&walk, req->src);
walk              377 arch/arm64/crypto/ghash-ce-glue.c 		u32 n = scatterwalk_clamp(&walk, len);
walk              381 arch/arm64/crypto/ghash-ce-glue.c 			scatterwalk_start(&walk, sg_next(walk.sg));
walk              382 arch/arm64/crypto/ghash-ce-glue.c 			n = scatterwalk_clamp(&walk, len);
walk              384 arch/arm64/crypto/ghash-ce-glue.c 		p = scatterwalk_map(&walk);
walk              390 arch/arm64/crypto/ghash-ce-glue.c 		scatterwalk_advance(&walk, n);
walk              391 arch/arm64/crypto/ghash-ce-glue.c 		scatterwalk_done(&walk, 0, len);
walk              423 arch/arm64/crypto/ghash-ce-glue.c 	struct skcipher_walk walk;
walk              437 arch/arm64/crypto/ghash-ce-glue.c 	err = skcipher_walk_aead_encrypt(&walk, req, false);
walk              439 arch/arm64/crypto/ghash-ce-glue.c 	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
walk              451 arch/arm64/crypto/ghash-ce-glue.c 			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
walk              456 arch/arm64/crypto/ghash-ce-glue.c 			pmull_gcm_encrypt(blocks, dg, walk.dst.virt.addr,
walk              457 arch/arm64/crypto/ghash-ce-glue.c 					  walk.src.virt.addr, &ctx->ghash_key,
walk              461 arch/arm64/crypto/ghash-ce-glue.c 			err = skcipher_walk_done(&walk,
walk              462 arch/arm64/crypto/ghash-ce-glue.c 					walk.nbytes % (2 * AES_BLOCK_SIZE));
walk              465 arch/arm64/crypto/ghash-ce-glue.c 		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
walk              470 arch/arm64/crypto/ghash-ce-glue.c 		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
walk              472 arch/arm64/crypto/ghash-ce-glue.c 				walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
walk              473 arch/arm64/crypto/ghash-ce-glue.c 			u8 *dst = walk.dst.virt.addr;
walk              474 arch/arm64/crypto/ghash-ce-glue.c 			u8 *src = walk.src.virt.addr;
walk              487 arch/arm64/crypto/ghash-ce-glue.c 					walk.dst.virt.addr, &ctx->ghash_key,
walk              490 arch/arm64/crypto/ghash-ce-glue.c 			err = skcipher_walk_done(&walk,
walk              491 arch/arm64/crypto/ghash-ce-glue.c 						 walk.nbytes % (2 * AES_BLOCK_SIZE));
walk              493 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes) {
walk              495 arch/arm64/crypto/ghash-ce-glue.c 			if (walk.nbytes > AES_BLOCK_SIZE) {
walk              503 arch/arm64/crypto/ghash-ce-glue.c 	if (walk.nbytes) {
walk              505 arch/arm64/crypto/ghash-ce-glue.c 		unsigned int nbytes = walk.nbytes;
walk              506 arch/arm64/crypto/ghash-ce-glue.c 		u8 *dst = walk.dst.virt.addr;
walk              509 arch/arm64/crypto/ghash-ce-glue.c 		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, ks,
walk              510 arch/arm64/crypto/ghash-ce-glue.c 			       walk.nbytes);
walk              512 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes > GHASH_BLOCK_SIZE) {
walk              523 arch/arm64/crypto/ghash-ce-glue.c 		err = skcipher_walk_done(&walk, 0);
walk              543 arch/arm64/crypto/ghash-ce-glue.c 	struct skcipher_walk walk;
walk              557 arch/arm64/crypto/ghash-ce-glue.c 	err = skcipher_walk_aead_decrypt(&walk, req, false);
walk              559 arch/arm64/crypto/ghash-ce-glue.c 	if (likely(crypto_simd_usable() && walk.total >= 2 * AES_BLOCK_SIZE)) {
walk              567 arch/arm64/crypto/ghash-ce-glue.c 			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
walk              568 arch/arm64/crypto/ghash-ce-glue.c 			int rem = walk.total - blocks * AES_BLOCK_SIZE;
walk              573 arch/arm64/crypto/ghash-ce-glue.c 			pmull_gcm_decrypt(blocks, dg, walk.dst.virt.addr,
walk              574 arch/arm64/crypto/ghash-ce-glue.c 					  walk.src.virt.addr, &ctx->ghash_key,
walk              595 arch/arm64/crypto/ghash-ce-glue.c 			err = skcipher_walk_done(&walk,
walk              596 arch/arm64/crypto/ghash-ce-glue.c 					walk.nbytes % (2 * AES_BLOCK_SIZE));
walk              599 arch/arm64/crypto/ghash-ce-glue.c 		} while (walk.nbytes >= 2 * AES_BLOCK_SIZE);
walk              604 arch/arm64/crypto/ghash-ce-glue.c 		while (walk.nbytes >= (2 * AES_BLOCK_SIZE)) {
walk              605 arch/arm64/crypto/ghash-ce-glue.c 			int blocks = walk.nbytes / (2 * AES_BLOCK_SIZE) * 2;
walk              606 arch/arm64/crypto/ghash-ce-glue.c 			u8 *dst = walk.dst.virt.addr;
walk              607 arch/arm64/crypto/ghash-ce-glue.c 			u8 *src = walk.src.virt.addr;
walk              609 arch/arm64/crypto/ghash-ce-glue.c 			ghash_do_update(blocks, dg, walk.src.virt.addr,
walk              622 arch/arm64/crypto/ghash-ce-glue.c 			err = skcipher_walk_done(&walk,
walk              623 arch/arm64/crypto/ghash-ce-glue.c 						 walk.nbytes % (2 * AES_BLOCK_SIZE));
walk              625 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes) {
walk              626 arch/arm64/crypto/ghash-ce-glue.c 			if (walk.nbytes > AES_BLOCK_SIZE) {
walk              639 arch/arm64/crypto/ghash-ce-glue.c 	if (walk.nbytes) {
walk              640 arch/arm64/crypto/ghash-ce-glue.c 		const u8 *src = walk.src.virt.addr;
walk              642 arch/arm64/crypto/ghash-ce-glue.c 		unsigned int nbytes = walk.nbytes;
walk              644 arch/arm64/crypto/ghash-ce-glue.c 		if (walk.nbytes > GHASH_BLOCK_SIZE) {
walk              655 arch/arm64/crypto/ghash-ce-glue.c 		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, iv,
walk              656 arch/arm64/crypto/ghash-ce-glue.c 			       walk.nbytes);
walk              658 arch/arm64/crypto/ghash-ce-glue.c 		err = skcipher_walk_done(&walk, 0);
walk              333 arch/ia64/kernel/efi.c 	walk(callback, arg, EFI_MEMORY_WB);
walk              343 arch/ia64/kernel/efi.c 	walk(callback, arg, EFI_MEMORY_UC);
walk               27 arch/openrisc/kernel/dma.c 		 unsigned long next, struct mm_walk *walk)
walk               53 arch/openrisc/kernel/dma.c 		   unsigned long next, struct mm_walk *walk)
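
The openrisc hits (and the powerpc subpage_prot one further down) are a different walk altogether: the page-table walker's struct mm_walk iterator, passed to each per-level callback. A minimal hypothetical pte-level callback is sketched below; the <linux/pagewalk.h> header location is an assumption (newer kernels split it out of mm.h), and registration through walk_page_range()/struct mm_walk_ops is not shown.

#include <linux/mm.h>
#include <linux/pagewalk.h>	/* assumption: post-split header location */

/* Counts present PTEs in the walked range via the caller-supplied cookie. */
static int my_count_present_pte(pte_t *pte, unsigned long addr,
				unsigned long next, struct mm_walk *walk)
{
	unsigned long *count = walk->private;

	if (pte_present(*pte))
		(*count)++;

	return 0;	/* non-zero would abort the walk */
}
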
walk              185 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              190 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              191 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              193 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              199 arch/powerpc/crypto/aes-spe-glue.c 		ppc_encrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,
walk              203 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              213 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              218 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              219 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              221 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              227 arch/powerpc/crypto/aes-spe-glue.c 		ppc_decrypt_ecb(walk.dst.virt.addr, walk.src.virt.addr,
walk              231 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              241 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              246 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              247 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              249 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              255 arch/powerpc/crypto/aes-spe-glue.c 		ppc_encrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,
walk              256 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_enc, ctx->rounds, nbytes, walk.iv);
walk              259 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              269 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              274 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              275 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              277 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              283 arch/powerpc/crypto/aes-spe-glue.c 		ppc_decrypt_cbc(walk.dst.virt.addr, walk.src.virt.addr,
walk              284 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_dec, ctx->rounds, nbytes, walk.iv);
walk              287 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              297 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              302 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              303 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
walk              305 arch/powerpc/crypto/aes-spe-glue.c 	while ((pbytes = walk.nbytes)) {
walk              309 arch/powerpc/crypto/aes-spe-glue.c 		ubytes = walk.nbytes - pbytes;
walk              312 arch/powerpc/crypto/aes-spe-glue.c 		ppc_crypt_ctr(walk.dst.virt.addr, walk.src.virt.addr,
walk              313 arch/powerpc/crypto/aes-spe-glue.c 			      ctx->key_enc, ctx->rounds, pbytes , walk.iv);
walk              317 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              327 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              333 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              334 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              337 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              343 arch/powerpc/crypto/aes-spe-glue.c 		ppc_encrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,
walk              344 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_enc, ctx->rounds, nbytes, walk.iv, twk);
walk              348 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
walk              358 arch/powerpc/crypto/aes-spe-glue.c 	struct blkcipher_walk walk;
walk              364 arch/powerpc/crypto/aes-spe-glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              365 arch/powerpc/crypto/aes-spe-glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              368 arch/powerpc/crypto/aes-spe-glue.c 	while ((nbytes = walk.nbytes)) {
walk              374 arch/powerpc/crypto/aes-spe-glue.c 		ppc_decrypt_xts(walk.dst.virt.addr, walk.src.virt.addr,
walk              375 arch/powerpc/crypto/aes-spe-glue.c 				ctx->key_dec, ctx->rounds, nbytes, walk.iv, twk);
walk              379 arch/powerpc/crypto/aes-spe-glue.c 		err = blkcipher_walk_done(desc, &walk, ubytes);
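
The powerpc hits above (and the s390 ones below) still use the legacy blkcipher_walk API: the walk is initialised explicitly from the dst/src scatterlists, and the loop rounds each chunk down to whole blocks itself. A sketch of that older pattern (my_ecb_blocks() is hypothetical, and the per-call byte cap the powerpc driver applies is omitted); later kernels drop this API in favour of the skcipher walk shown earlier.

#include <crypto/aes.h>
#include <crypto/algapi.h>	/* legacy blkcipher walk helpers */

void my_ecb_blocks(u8 *dst, const u8 *src, u32 bytes);	/* hypothetical */

static int my_legacy_ecb(struct blkcipher_desc *desc, struct scatterlist *dst,
			 struct scatterlist *src, unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		/* Round the chunk down to whole blocks ourselves. */
		unsigned int ubytes = nbytes & (AES_BLOCK_SIZE - 1);

		my_ecb_blocks(walk.dst.virt.addr, walk.src.virt.addr,
			      nbytes - ubytes);

		/* Tell the walk how many bytes were *not* processed. */
		err = blkcipher_walk_done(desc, &walk, ubytes);
	}
	return err;
}
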
walk              135 arch/powerpc/mm/book3s64/subpage_prot.c 				  unsigned long end, struct mm_walk *walk)
walk              137 arch/powerpc/mm/book3s64/subpage_prot.c 	struct vm_area_struct *vma = walk->vma;
walk               61 arch/s390/crypto/aes_s390.c 	struct scatter_walk walk;
walk              259 arch/s390/crypto/aes_s390.c 			 struct blkcipher_walk *walk)
walk              265 arch/s390/crypto/aes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              266 arch/s390/crypto/aes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              270 arch/s390/crypto/aes_s390.c 			 walk->dst.virt.addr, walk->src.virt.addr, n);
walk              271 arch/s390/crypto/aes_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              282 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              287 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              288 arch/s390/crypto/aes_s390.c 	return ecb_aes_crypt(desc, 0, &walk);
walk              296 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              301 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              302 arch/s390/crypto/aes_s390.c 	return ecb_aes_crypt(desc, CPACF_DECRYPT, &walk);
walk              374 arch/s390/crypto/aes_s390.c 			 struct blkcipher_walk *walk)
walk              384 arch/s390/crypto/aes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              385 arch/s390/crypto/aes_s390.c 	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
walk              387 arch/s390/crypto/aes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              391 arch/s390/crypto/aes_s390.c 			  walk->dst.virt.addr, walk->src.virt.addr, n);
walk              392 arch/s390/crypto/aes_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              394 arch/s390/crypto/aes_s390.c 	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
walk              403 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              408 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              409 arch/s390/crypto/aes_s390.c 	return cbc_aes_crypt(desc, 0, &walk);
walk              417 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              422 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              423 arch/s390/crypto/aes_s390.c 	return cbc_aes_crypt(desc, CPACF_DECRYPT, &walk);
walk              543 arch/s390/crypto/aes_s390.c 			 struct blkcipher_walk *walk)
walk              560 arch/s390/crypto/aes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              565 arch/s390/crypto/aes_s390.c 	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
walk              572 arch/s390/crypto/aes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              576 arch/s390/crypto/aes_s390.c 			 walk->dst.virt.addr, walk->src.virt.addr, n);
walk              577 arch/s390/crypto/aes_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              587 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              595 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              596 arch/s390/crypto/aes_s390.c 	return xts_aes_crypt(desc, 0, &walk);
walk              604 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              612 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              613 arch/s390/crypto/aes_s390.c 	return xts_aes_crypt(desc, CPACF_DECRYPT, &walk);
walk              700 arch/s390/crypto/aes_s390.c 			 struct blkcipher_walk *walk)
walk              709 arch/s390/crypto/aes_s390.c 	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
walk              710 arch/s390/crypto/aes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              713 arch/s390/crypto/aes_s390.c 			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
walk              714 arch/s390/crypto/aes_s390.c 		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
walk              716 arch/s390/crypto/aes_s390.c 			    walk->dst.virt.addr, walk->src.virt.addr,
walk              719 arch/s390/crypto/aes_s390.c 			memcpy(walk->iv, ctrptr + n - AES_BLOCK_SIZE,
walk              721 arch/s390/crypto/aes_s390.c 		crypto_inc(walk->iv, AES_BLOCK_SIZE);
walk              722 arch/s390/crypto/aes_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              731 arch/s390/crypto/aes_s390.c 			    buf, walk->src.virt.addr,
walk              732 arch/s390/crypto/aes_s390.c 			    AES_BLOCK_SIZE, walk->iv);
walk              733 arch/s390/crypto/aes_s390.c 		memcpy(walk->dst.virt.addr, buf, nbytes);
walk              734 arch/s390/crypto/aes_s390.c 		crypto_inc(walk->iv, AES_BLOCK_SIZE);
walk              735 arch/s390/crypto/aes_s390.c 		ret = blkcipher_walk_done(desc, walk, 0);
walk              746 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              751 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              752 arch/s390/crypto/aes_s390.c 	return ctr_aes_crypt(desc, 0, &walk);
walk              760 arch/s390/crypto/aes_s390.c 	struct blkcipher_walk walk;
walk              765 arch/s390/crypto/aes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              766 arch/s390/crypto/aes_s390.c 	return ctr_aes_crypt(desc, CPACF_DECRYPT, &walk);
walk              840 arch/s390/crypto/aes_s390.c 	scatterwalk_start(&gw->walk, sg);
walk              847 arch/s390/crypto/aes_s390.c 	gw->walk_bytes = scatterwalk_clamp(&gw->walk, gw->walk_bytes_remain);
walk              849 arch/s390/crypto/aes_s390.c 		nextsg = sg_next(gw->walk.sg);
walk              852 arch/s390/crypto/aes_s390.c 		scatterwalk_start(&gw->walk, nextsg);
walk              853 arch/s390/crypto/aes_s390.c 		gw->walk_bytes = scatterwalk_clamp(&gw->walk,
walk              856 arch/s390/crypto/aes_s390.c 	gw->walk_ptr = scatterwalk_map(&gw->walk);
walk              864 arch/s390/crypto/aes_s390.c 	scatterwalk_unmap(&gw->walk);
walk              865 arch/s390/crypto/aes_s390.c 	scatterwalk_advance(&gw->walk, nbytes);
walk              866 arch/s390/crypto/aes_s390.c 	scatterwalk_done(&gw->walk, 0, gw->walk_bytes_remain);
walk              939 arch/s390/crypto/aes_s390.c 	scatterwalk_unmap(&gw->walk);
walk               83 arch/s390/crypto/des_s390.c 			    struct blkcipher_walk *walk)
walk               89 arch/s390/crypto/des_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk               90 arch/s390/crypto/des_s390.c 	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
walk               93 arch/s390/crypto/des_s390.c 		cpacf_km(fc, ctx->key, walk->dst.virt.addr,
walk               94 arch/s390/crypto/des_s390.c 			 walk->src.virt.addr, n);
walk               95 arch/s390/crypto/des_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              101 arch/s390/crypto/des_s390.c 			    struct blkcipher_walk *walk)
walk              111 arch/s390/crypto/des_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              112 arch/s390/crypto/des_s390.c 	memcpy(param.iv, walk->iv, DES_BLOCK_SIZE);
walk              114 arch/s390/crypto/des_s390.c 	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
walk              117 arch/s390/crypto/des_s390.c 		cpacf_kmc(fc, &param, walk->dst.virt.addr,
walk              118 arch/s390/crypto/des_s390.c 			  walk->src.virt.addr, n);
walk              119 arch/s390/crypto/des_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              121 arch/s390/crypto/des_s390.c 	memcpy(walk->iv, param.iv, DES_BLOCK_SIZE);
walk              129 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              131 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              132 arch/s390/crypto/des_s390.c 	return ecb_desall_crypt(desc, CPACF_KM_DEA, &walk);
walk              139 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              141 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              142 arch/s390/crypto/des_s390.c 	return ecb_desall_crypt(desc, CPACF_KM_DEA | CPACF_DECRYPT, &walk);
walk              169 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              171 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              172 arch/s390/crypto/des_s390.c 	return cbc_desall_crypt(desc, CPACF_KMC_DEA, &walk);
walk              179 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              181 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              182 arch/s390/crypto/des_s390.c 	return cbc_desall_crypt(desc, CPACF_KMC_DEA | CPACF_DECRYPT, &walk);
walk              273 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              275 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              276 arch/s390/crypto/des_s390.c 	return ecb_desall_crypt(desc, CPACF_KM_TDEA_192, &walk);
walk              283 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              285 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              287 arch/s390/crypto/des_s390.c 				&walk);
walk              314 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              316 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              317 arch/s390/crypto/des_s390.c 	return cbc_desall_crypt(desc, CPACF_KMC_TDEA_192, &walk);
walk              324 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              326 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              328 arch/s390/crypto/des_s390.c 				&walk);
walk              368 arch/s390/crypto/des_s390.c 			    struct blkcipher_walk *walk)
walk              377 arch/s390/crypto/des_s390.c 	ret = blkcipher_walk_virt_block(desc, walk, DES_BLOCK_SIZE);
walk              378 arch/s390/crypto/des_s390.c 	while ((nbytes = walk->nbytes) >= DES_BLOCK_SIZE) {
walk              381 arch/s390/crypto/des_s390.c 			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
walk              382 arch/s390/crypto/des_s390.c 		ctrptr = (n > DES_BLOCK_SIZE) ? ctrblk : walk->iv;
walk              383 arch/s390/crypto/des_s390.c 		cpacf_kmctr(fc, ctx->key, walk->dst.virt.addr,
walk              384 arch/s390/crypto/des_s390.c 			    walk->src.virt.addr, n, ctrptr);
walk              386 arch/s390/crypto/des_s390.c 			memcpy(walk->iv, ctrptr + n - DES_BLOCK_SIZE,
walk              388 arch/s390/crypto/des_s390.c 		crypto_inc(walk->iv, DES_BLOCK_SIZE);
walk              389 arch/s390/crypto/des_s390.c 		ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              395 arch/s390/crypto/des_s390.c 		cpacf_kmctr(fc, ctx->key, buf, walk->src.virt.addr,
walk              396 arch/s390/crypto/des_s390.c 			    DES_BLOCK_SIZE, walk->iv);
walk              397 arch/s390/crypto/des_s390.c 		memcpy(walk->dst.virt.addr, buf, nbytes);
walk              398 arch/s390/crypto/des_s390.c 		crypto_inc(walk->iv, DES_BLOCK_SIZE);
walk              399 arch/s390/crypto/des_s390.c 		ret = blkcipher_walk_done(desc, walk, 0);
walk              408 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              410 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              411 arch/s390/crypto/des_s390.c 	return ctr_desall_crypt(desc, CPACF_KMCTR_DEA, &walk);
walk              418 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              420 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              421 arch/s390/crypto/des_s390.c 	return ctr_desall_crypt(desc, CPACF_KMCTR_DEA | CPACF_DECRYPT, &walk);
walk              449 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              451 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              452 arch/s390/crypto/des_s390.c 	return ctr_desall_crypt(desc, CPACF_KMCTR_TDEA_192, &walk);
walk              459 arch/s390/crypto/des_s390.c 	struct blkcipher_walk walk;
walk              461 arch/s390/crypto/des_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              463 arch/s390/crypto/des_s390.c 				&walk);
walk              162 arch/s390/crypto/paes_s390.c 			  struct blkcipher_walk *walk)
walk              168 arch/s390/crypto/paes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              169 arch/s390/crypto/paes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              173 arch/s390/crypto/paes_s390.c 			     walk->dst.virt.addr, walk->src.virt.addr, n);
walk              175 arch/s390/crypto/paes_s390.c 			ret = blkcipher_walk_done(desc, walk, nbytes - k);
walk              178 arch/s390/crypto/paes_s390.c 				return blkcipher_walk_done(desc, walk, -EIO);
walk              188 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              190 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              191 arch/s390/crypto/paes_s390.c 	return ecb_paes_crypt(desc, CPACF_ENCRYPT, &walk);
walk              198 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              200 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              201 arch/s390/crypto/paes_s390.c 	return ecb_paes_crypt(desc, CPACF_DECRYPT, &walk);
walk              280 arch/s390/crypto/paes_s390.c 			  struct blkcipher_walk *walk)
walk              290 arch/s390/crypto/paes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              291 arch/s390/crypto/paes_s390.c 	memcpy(param.iv, walk->iv, AES_BLOCK_SIZE);
walk              293 arch/s390/crypto/paes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              297 arch/s390/crypto/paes_s390.c 			      walk->dst.virt.addr, walk->src.virt.addr, n);
walk              299 arch/s390/crypto/paes_s390.c 			ret = blkcipher_walk_done(desc, walk, nbytes - k);
walk              302 arch/s390/crypto/paes_s390.c 				return blkcipher_walk_done(desc, walk, -EIO);
walk              306 arch/s390/crypto/paes_s390.c 	memcpy(walk->iv, param.iv, AES_BLOCK_SIZE);
walk              314 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              316 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              317 arch/s390/crypto/paes_s390.c 	return cbc_paes_crypt(desc, 0, &walk);
walk              324 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              326 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              327 arch/s390/crypto/paes_s390.c 	return cbc_paes_crypt(desc, CPACF_DECRYPT, &walk);
walk              434 arch/s390/crypto/paes_s390.c 			  struct blkcipher_walk *walk)
walk              451 arch/s390/crypto/paes_s390.c 	ret = blkcipher_walk_virt(desc, walk);
walk              456 arch/s390/crypto/paes_s390.c 	memcpy(pcc_param.tweak, walk->iv, sizeof(pcc_param.tweak));
walk              463 arch/s390/crypto/paes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              467 arch/s390/crypto/paes_s390.c 			     walk->dst.virt.addr, walk->src.virt.addr, n);
walk              469 arch/s390/crypto/paes_s390.c 			ret = blkcipher_walk_done(desc, walk, nbytes - k);
walk              472 arch/s390/crypto/paes_s390.c 				return blkcipher_walk_done(desc, walk, -EIO);
walk              483 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              485 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              486 arch/s390/crypto/paes_s390.c 	return xts_paes_crypt(desc, 0, &walk);
walk              493 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              495 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              496 arch/s390/crypto/paes_s390.c 	return xts_paes_crypt(desc, CPACF_DECRYPT, &walk);
walk              592 arch/s390/crypto/paes_s390.c 			  struct blkcipher_walk *walk)
walk              601 arch/s390/crypto/paes_s390.c 	ret = blkcipher_walk_virt_block(desc, walk, AES_BLOCK_SIZE);
walk              602 arch/s390/crypto/paes_s390.c 	while ((nbytes = walk->nbytes) >= AES_BLOCK_SIZE) {
walk              605 arch/s390/crypto/paes_s390.c 			n = __ctrblk_init(ctrblk, walk->iv, nbytes);
walk              606 arch/s390/crypto/paes_s390.c 		ctrptr = (n > AES_BLOCK_SIZE) ? ctrblk : walk->iv;
walk              608 arch/s390/crypto/paes_s390.c 				walk->dst.virt.addr, walk->src.virt.addr,
walk              612 arch/s390/crypto/paes_s390.c 				memcpy(walk->iv, ctrptr + k - AES_BLOCK_SIZE,
walk              614 arch/s390/crypto/paes_s390.c 			crypto_inc(walk->iv, AES_BLOCK_SIZE);
walk              615 arch/s390/crypto/paes_s390.c 			ret = blkcipher_walk_done(desc, walk, nbytes - n);
walk              621 arch/s390/crypto/paes_s390.c 				return blkcipher_walk_done(desc, walk, -EIO);
walk              634 arch/s390/crypto/paes_s390.c 					walk->src.virt.addr, AES_BLOCK_SIZE,
walk              635 arch/s390/crypto/paes_s390.c 					walk->iv) == AES_BLOCK_SIZE)
walk              638 arch/s390/crypto/paes_s390.c 				return blkcipher_walk_done(desc, walk, -EIO);
walk              640 arch/s390/crypto/paes_s390.c 		memcpy(walk->dst.virt.addr, buf, nbytes);
walk              641 arch/s390/crypto/paes_s390.c 		crypto_inc(walk->iv, AES_BLOCK_SIZE);
walk              642 arch/s390/crypto/paes_s390.c 		ret = blkcipher_walk_done(desc, walk, 0);
walk              652 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              654 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              655 arch/s390/crypto/paes_s390.c 	return ctr_paes_crypt(desc, 0, &walk);
walk              662 arch/s390/crypto/paes_s390.c 	struct blkcipher_walk walk;
walk              664 arch/s390/crypto/paes_s390.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              665 arch/s390/crypto/paes_s390.c 	return ctr_paes_crypt(desc, CPACF_DECRYPT, &walk);
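The s390 des_s390.c and paes_s390.c references above all share one legacy blkcipher_walk pattern: initialise the walk over the scatterlists, map a virtual chunk, hand whole blocks to CPACF, and report the unprocessed tail back to the walker. A minimal sketch of that pattern follows; my_hw_crypt_blocks() and MY_BLOCK_SIZE are illustrative stand-ins, not real kernel symbols.

static int my_ecb_crypt(struct blkcipher_desc *desc,
			struct scatterlist *dst, struct scatterlist *src,
			unsigned int nbytes)
{
	void *ctx = crypto_blkcipher_ctx(desc->tfm);
	struct blkcipher_walk walk;
	int ret;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	ret = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes) >= MY_BLOCK_SIZE) {
		/* round down to whole blocks; the walker keeps the rest */
		unsigned int n = nbytes & ~(MY_BLOCK_SIZE - 1);

		my_hw_crypt_blocks(ctx, walk.dst.virt.addr,
				   walk.src.virt.addr, n);
		ret = blkcipher_walk_done(desc, &walk, nbytes - n);
	}
	return ret;
}

The per-mode entry points in des_s390.c and paes_s390.c are thin wrappers that pick the CPACF function code (and CPACF_DECRYPT flag) before calling a common helper of this shape.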
walk             2513 arch/s390/mm/gmap.c 			   unsigned long end, struct mm_walk *walk)
walk             2521 arch/s390/mm/gmap.c 		ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
walk             2523 arch/s390/mm/gmap.c 			ptep_xchg_direct(walk->mm, addr, ptep, __pte(_PAGE_INVALID));
walk             2561 arch/s390/mm/gmap.c 				  unsigned long next, struct mm_walk *walk)
walk             2564 arch/s390/mm/gmap.c 	ptep_zap_key(walk->mm, addr, pte);
walk             2570 arch/s390/mm/gmap.c 				      struct mm_walk *walk)
walk             2631 arch/s390/mm/gmap.c 			     unsigned long next, struct mm_walk *walk)
walk             2633 arch/s390/mm/gmap.c 	ptep_zap_unused(walk->mm, addr, pte, 1);
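The gmap.c references above are page-table walk callbacks rather than crypto walks: each callback receives the PTE, the address range and the struct mm_walk cookie, and a nonzero return value aborts the walk. A minimal sketch of such a callback, assuming a hypothetical my_zap() helper (registration goes through the kernel's page-walk API, e.g. walk_page_range(), whose exact form varies by kernel version):

static int my_pte_entry(pte_t *pte, unsigned long addr,
			unsigned long next, struct mm_walk *walk)
{
	/* per-PTE work; walk->mm (and walk->private) carry the context */
	my_zap(walk->mm, addr, pte);
	return 0;	/* nonzero would stop the walk and be propagated */
}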
walk              221 arch/sparc/crypto/aes_glue.c 	struct blkcipher_walk walk;
walk              224 arch/sparc/crypto/aes_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              225 arch/sparc/crypto/aes_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              229 arch/sparc/crypto/aes_glue.c 	while ((nbytes = walk.nbytes)) {
walk              234 arch/sparc/crypto/aes_glue.c 					      (const u64 *)walk.src.virt.addr,
walk              235 arch/sparc/crypto/aes_glue.c 					      (u64 *) walk.dst.virt.addr,
walk              239 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              250 arch/sparc/crypto/aes_glue.c 	struct blkcipher_walk walk;
walk              254 arch/sparc/crypto/aes_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              255 arch/sparc/crypto/aes_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              260 arch/sparc/crypto/aes_glue.c 	while ((nbytes = walk.nbytes)) {
walk              265 arch/sparc/crypto/aes_glue.c 					      (const u64 *) walk.src.virt.addr,
walk              266 arch/sparc/crypto/aes_glue.c 					      (u64 *) walk.dst.virt.addr, block_len);
walk              269 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              281 arch/sparc/crypto/aes_glue.c 	struct blkcipher_walk walk;
walk              284 arch/sparc/crypto/aes_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              285 arch/sparc/crypto/aes_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              289 arch/sparc/crypto/aes_glue.c 	while ((nbytes = walk.nbytes)) {
walk              294 arch/sparc/crypto/aes_glue.c 					      (const u64 *)walk.src.virt.addr,
walk              295 arch/sparc/crypto/aes_glue.c 					      (u64 *) walk.dst.virt.addr,
walk              296 arch/sparc/crypto/aes_glue.c 					      block_len, (u64 *) walk.iv);
walk              299 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              310 arch/sparc/crypto/aes_glue.c 	struct blkcipher_walk walk;
walk              314 arch/sparc/crypto/aes_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              315 arch/sparc/crypto/aes_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              320 arch/sparc/crypto/aes_glue.c 	while ((nbytes = walk.nbytes)) {
walk              325 arch/sparc/crypto/aes_glue.c 					      (const u64 *) walk.src.virt.addr,
walk              326 arch/sparc/crypto/aes_glue.c 					      (u64 *) walk.dst.virt.addr,
walk              327 arch/sparc/crypto/aes_glue.c 					      block_len, (u64 *) walk.iv);
walk              330 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              338 arch/sparc/crypto/aes_glue.c 			    struct blkcipher_walk *walk)
walk              340 arch/sparc/crypto/aes_glue.c 	u8 *ctrblk = walk->iv;
walk              342 arch/sparc/crypto/aes_glue.c 	u8 *src = walk->src.virt.addr;
walk              343 arch/sparc/crypto/aes_glue.c 	u8 *dst = walk->dst.virt.addr;
walk              344 arch/sparc/crypto/aes_glue.c 	unsigned int nbytes = walk->nbytes;
walk              357 arch/sparc/crypto/aes_glue.c 	struct blkcipher_walk walk;
walk              360 arch/sparc/crypto/aes_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              361 arch/sparc/crypto/aes_glue.c 	err = blkcipher_walk_virt_block(desc, &walk, AES_BLOCK_SIZE);
walk              365 arch/sparc/crypto/aes_glue.c 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
walk              370 arch/sparc/crypto/aes_glue.c 					    (const u64 *)walk.src.virt.addr,
walk              371 arch/sparc/crypto/aes_glue.c 					    (u64 *) walk.dst.virt.addr,
walk              372 arch/sparc/crypto/aes_glue.c 					    block_len, (u64 *) walk.iv);
walk              375 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              377 arch/sparc/crypto/aes_glue.c 	if (walk.nbytes) {
walk              378 arch/sparc/crypto/aes_glue.c 		ctr_crypt_final(ctx, &walk);
walk              379 arch/sparc/crypto/aes_glue.c 		err = blkcipher_walk_done(desc, &walk, 0);
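The sparc ctr_crypt() references above show the usual CTR tail handling: full blocks are processed in the loop, and a final sub-block is handled by encrypting the counter into a keystream buffer and XORing only the remaining bytes. A minimal sketch, assuming a hypothetical my_encrypt_block() single-block primitive and MY_BLOCK_SIZE:

static void my_ctr_final(void *ctx, struct blkcipher_walk *walk)
{
	u8 keystream[MY_BLOCK_SIZE];
	unsigned int nbytes = walk->nbytes;	/* strictly less than a block here */

	/* turn the counter into keystream, then XOR just the tail */
	my_encrypt_block(ctx, keystream, walk->iv);
	crypto_xor_cpy(walk->dst.virt.addr, keystream,
		       walk->src.virt.addr, nbytes);
	crypto_inc(walk->iv, MY_BLOCK_SIZE);
}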
walk               91 arch/sparc/crypto/camellia_glue.c 	struct blkcipher_walk walk;
walk              100 arch/sparc/crypto/camellia_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              101 arch/sparc/crypto/camellia_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              109 arch/sparc/crypto/camellia_glue.c 	while ((nbytes = walk.nbytes)) {
walk              116 arch/sparc/crypto/camellia_glue.c 			src64 = (const u64 *)walk.src.virt.addr;
walk              117 arch/sparc/crypto/camellia_glue.c 			dst64 = (u64 *) walk.dst.virt.addr;
walk              121 arch/sparc/crypto/camellia_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              154 arch/sparc/crypto/camellia_glue.c 	struct blkcipher_walk walk;
walk              163 arch/sparc/crypto/camellia_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              164 arch/sparc/crypto/camellia_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              169 arch/sparc/crypto/camellia_glue.c 	while ((nbytes = walk.nbytes)) {
walk              176 arch/sparc/crypto/camellia_glue.c 			src64 = (const u64 *)walk.src.virt.addr;
walk              177 arch/sparc/crypto/camellia_glue.c 			dst64 = (u64 *) walk.dst.virt.addr;
walk              179 arch/sparc/crypto/camellia_glue.c 			   (u64 *) walk.iv);
walk              182 arch/sparc/crypto/camellia_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              193 arch/sparc/crypto/camellia_glue.c 	struct blkcipher_walk walk;
walk              202 arch/sparc/crypto/camellia_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              203 arch/sparc/crypto/camellia_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              208 arch/sparc/crypto/camellia_glue.c 	while ((nbytes = walk.nbytes)) {
walk              215 arch/sparc/crypto/camellia_glue.c 			src64 = (const u64 *)walk.src.virt.addr;
walk              216 arch/sparc/crypto/camellia_glue.c 			dst64 = (u64 *) walk.dst.virt.addr;
walk              218 arch/sparc/crypto/camellia_glue.c 			   (u64 *) walk.iv);
walk              221 arch/sparc/crypto/camellia_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk               95 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk               98 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk               99 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              106 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              110 arch/sparc/crypto/des_glue.c 			des_sparc64_ecb_crypt((const u64 *)walk.src.virt.addr,
walk              111 arch/sparc/crypto/des_glue.c 					      (u64 *) walk.dst.virt.addr,
walk              115 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              143 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk              146 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              147 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              151 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              155 arch/sparc/crypto/des_glue.c 			des_sparc64_cbc_encrypt((const u64 *)walk.src.virt.addr,
walk              156 arch/sparc/crypto/des_glue.c 						(u64 *) walk.dst.virt.addr,
walk              157 arch/sparc/crypto/des_glue.c 						block_len, (u64 *) walk.iv);
walk              160 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              174 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk              177 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              178 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              182 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              186 arch/sparc/crypto/des_glue.c 			des_sparc64_cbc_decrypt((const u64 *)walk.src.virt.addr,
walk              187 arch/sparc/crypto/des_glue.c 						(u64 *) walk.dst.virt.addr,
walk              188 arch/sparc/crypto/des_glue.c 						block_len, (u64 *) walk.iv);
walk              191 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              259 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk              263 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              264 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              272 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              276 arch/sparc/crypto/des_glue.c 			const u64 *src64 = (const u64 *)walk.src.virt.addr;
walk              278 arch/sparc/crypto/des_glue.c 						   (u64 *) walk.dst.virt.addr,
walk              282 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              311 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk              315 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              316 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              321 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              325 arch/sparc/crypto/des_glue.c 			const u64 *src64 = (const u64 *)walk.src.virt.addr;
walk              327 arch/sparc/crypto/des_glue.c 						     (u64 *) walk.dst.virt.addr,
walk              329 arch/sparc/crypto/des_glue.c 						     (u64 *) walk.iv);
walk              332 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              347 arch/sparc/crypto/des_glue.c 	struct blkcipher_walk walk;
walk              351 arch/sparc/crypto/des_glue.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              352 arch/sparc/crypto/des_glue.c 	err = blkcipher_walk_virt(desc, &walk);
walk              357 arch/sparc/crypto/des_glue.c 	while ((nbytes = walk.nbytes)) {
walk              361 arch/sparc/crypto/des_glue.c 			const u64 *src64 = (const u64 *)walk.src.virt.addr;
walk              363 arch/sparc/crypto/des_glue.c 						     (u64 *) walk.dst.virt.addr,
walk              365 arch/sparc/crypto/des_glue.c 						     (u64 *) walk.iv);
walk              368 arch/sparc/crypto/des_glue.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              226 arch/um/kernel/irq.c 	struct irq_entry *walk;
walk              232 arch/um/kernel/irq.c 	walk = active_fds;
walk              233 arch/um/kernel/irq.c 	while (walk != NULL) {
walk              236 arch/um/kernel/irq.c 			if (walk->irq_array[i] != NULL) {
walk              243 arch/um/kernel/irq.c 				active_fds = walk->next;
walk              245 arch/um/kernel/irq.c 				previous->next = walk->next;
walk              246 arch/um/kernel/irq.c 			to_free = walk;
walk              250 arch/um/kernel/irq.c 		walk = walk->next;
walk              261 arch/um/kernel/irq.c 	struct irq_entry *walk = active_fds;
walk              263 arch/um/kernel/irq.c 	while (walk != NULL) {
walk              264 arch/um/kernel/irq.c 		if (walk->fd == fd)
walk              265 arch/um/kernel/irq.c 			return walk;
walk              266 arch/um/kernel/irq.c 		walk = walk->next;
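The um/kernel/irq.c references above walk a singly linked list of irq_entry structures while keeping a trailing "previous" pointer, so a matching node can be unlinked whether it is the head or an interior element. A small generic sketch of that idiom (struct my_entry is illustrative only):

struct my_entry {
	struct my_entry *next;
	int fd;
};

static struct my_entry *my_remove(struct my_entry **head, int fd)
{
	struct my_entry *walk = *head, *previous = NULL;

	while (walk != NULL) {
		if (walk->fd == fd) {
			if (previous == NULL)
				*head = walk->next;	/* unlink the head */
			else
				previous->next = walk->next;
			return walk;			/* caller frees it */
		}
		previous = walk;
		walk = walk->next;
	}
	return NULL;
}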
walk               60 arch/x86/crypto/aegis128-aesni-glue.c 	int (*skcipher_walk_init)(struct skcipher_walk *walk,
walk               73 arch/x86/crypto/aegis128-aesni-glue.c 	struct scatter_walk walk;
walk               77 arch/x86/crypto/aegis128-aesni-glue.c 	scatterwalk_start(&walk, sg_src);
walk               79 arch/x86/crypto/aegis128-aesni-glue.c 		unsigned int size = scatterwalk_clamp(&walk, assoclen);
walk               81 arch/x86/crypto/aegis128-aesni-glue.c 		void *mapped = scatterwalk_map(&walk);
walk              107 arch/x86/crypto/aegis128-aesni-glue.c 		scatterwalk_advance(&walk, size);
walk              108 arch/x86/crypto/aegis128-aesni-glue.c 		scatterwalk_done(&walk, 0, assoclen);
walk              118 arch/x86/crypto/aegis128-aesni-glue.c 		struct aegis_state *state, struct skcipher_walk *walk,
walk              121 arch/x86/crypto/aegis128-aesni-glue.c 	while (walk->nbytes >= AEGIS128_BLOCK_SIZE) {
walk              123 arch/x86/crypto/aegis128-aesni-glue.c 				  round_down(walk->nbytes, AEGIS128_BLOCK_SIZE),
walk              124 arch/x86/crypto/aegis128-aesni-glue.c 				  walk->src.virt.addr, walk->dst.virt.addr);
walk              125 arch/x86/crypto/aegis128-aesni-glue.c 		skcipher_walk_done(walk, walk->nbytes % AEGIS128_BLOCK_SIZE);
walk              128 arch/x86/crypto/aegis128-aesni-glue.c 	if (walk->nbytes) {
walk              129 arch/x86/crypto/aegis128-aesni-glue.c 		ops->crypt_tail(state, walk->nbytes, walk->src.virt.addr,
walk              130 arch/x86/crypto/aegis128-aesni-glue.c 				walk->dst.virt.addr);
walk              131 arch/x86/crypto/aegis128-aesni-glue.c 		skcipher_walk_done(walk, 0);
walk              174 arch/x86/crypto/aegis128-aesni-glue.c 	struct skcipher_walk walk;
walk              177 arch/x86/crypto/aegis128-aesni-glue.c 	ops->skcipher_walk_init(&walk, req, true);
walk              183 arch/x86/crypto/aegis128-aesni-glue.c 	crypto_aegis128_aesni_process_crypt(&state, &walk, ops);
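The aegis128-aesni-glue.c references above use two walkers: a skcipher_walk for the ciphertext and a plain scatter_walk for the associated data, which maps one contiguous piece of the scatterlist at a time. A minimal sketch of the AD walk, assuming a hypothetical my_absorb() consumer:

static void my_process_ad(struct scatterlist *sg_src, unsigned int assoclen)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg_src);
	while (assoclen != 0) {
		unsigned int size = scatterwalk_clamp(&walk, assoclen);
		void *mapped = scatterwalk_map(&walk);

		my_absorb(mapped, size);	/* consume this contiguous piece */

		assoclen -= size;
		scatterwalk_unmap(mapped);
		scatterwalk_advance(&walk, size);
		scatterwalk_done(&walk, 0, assoclen);
	}
}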
walk              384 arch/x86/crypto/aesni-intel_glue.c 	struct skcipher_walk walk;
walk              388 arch/x86/crypto/aesni-intel_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              391 arch/x86/crypto/aesni-intel_glue.c 	while ((nbytes = walk.nbytes)) {
walk              392 arch/x86/crypto/aesni-intel_glue.c 		aesni_ecb_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk              395 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              406 arch/x86/crypto/aesni-intel_glue.c 	struct skcipher_walk walk;
walk              410 arch/x86/crypto/aesni-intel_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              413 arch/x86/crypto/aesni-intel_glue.c 	while ((nbytes = walk.nbytes)) {
walk              414 arch/x86/crypto/aesni-intel_glue.c 		aesni_ecb_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk              417 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              428 arch/x86/crypto/aesni-intel_glue.c 	struct skcipher_walk walk;
walk              432 arch/x86/crypto/aesni-intel_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              435 arch/x86/crypto/aesni-intel_glue.c 	while ((nbytes = walk.nbytes)) {
walk              436 arch/x86/crypto/aesni-intel_glue.c 		aesni_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk              437 arch/x86/crypto/aesni-intel_glue.c 			      nbytes & AES_BLOCK_MASK, walk.iv);
walk              439 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              450 arch/x86/crypto/aesni-intel_glue.c 	struct skcipher_walk walk;
walk              454 arch/x86/crypto/aesni-intel_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              457 arch/x86/crypto/aesni-intel_glue.c 	while ((nbytes = walk.nbytes)) {
walk              458 arch/x86/crypto/aesni-intel_glue.c 		aesni_cbc_dec(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk              459 arch/x86/crypto/aesni-intel_glue.c 			      nbytes & AES_BLOCK_MASK, walk.iv);
walk              461 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              470 arch/x86/crypto/aesni-intel_glue.c 			    struct skcipher_walk *walk)
walk              472 arch/x86/crypto/aesni-intel_glue.c 	u8 *ctrblk = walk->iv;
walk              474 arch/x86/crypto/aesni-intel_glue.c 	u8 *src = walk->src.virt.addr;
walk              475 arch/x86/crypto/aesni-intel_glue.c 	u8 *dst = walk->dst.virt.addr;
walk              476 arch/x86/crypto/aesni-intel_glue.c 	unsigned int nbytes = walk->nbytes;
walk              507 arch/x86/crypto/aesni-intel_glue.c 	struct skcipher_walk walk;
walk              511 arch/x86/crypto/aesni-intel_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              514 arch/x86/crypto/aesni-intel_glue.c 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
walk              515 arch/x86/crypto/aesni-intel_glue.c 		aesni_ctr_enc_tfm(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk              516 arch/x86/crypto/aesni-intel_glue.c 			              nbytes & AES_BLOCK_MASK, walk.iv);
walk              518 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              520 arch/x86/crypto/aesni-intel_glue.c 	if (walk.nbytes) {
walk              521 arch/x86/crypto/aesni-intel_glue.c 		ctr_crypt_final(ctx, &walk);
walk              522 arch/x86/crypto/aesni-intel_glue.c 		err = skcipher_walk_done(&walk, 0);
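The aesni-intel_glue.c references above are the modern skcipher_walk form of the same loop: skcipher_walk_virt() maps a chunk, whole blocks are processed, and the remainder is handed back via skcipher_walk_done(). A minimal sketch, assuming a hypothetical my_cbc_enc() primitive and MY_BLOCK_SIZE; the "true" argument requests atomic (non-sleeping) behaviour since the per-chunk work typically runs with the FPU held:

static int my_cbc_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	void *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, true);

	while ((nbytes = walk.nbytes)) {
		/* process whole blocks only; walk.iv carries the chaining value */
		my_cbc_enc(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			   nbytes & ~(MY_BLOCK_SIZE - 1), walk.iv);
		/* report the leftover bytes (if any) back to the walker */
		err = skcipher_walk_done(&walk, nbytes & (MY_BLOCK_SIZE - 1));
	}
	return err;
}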
walk               78 arch/x86/crypto/blowfish_glue.c 	struct skcipher_walk walk;
walk               82 arch/x86/crypto/blowfish_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               84 arch/x86/crypto/blowfish_glue.c 	while ((nbytes = walk.nbytes)) {
walk               85 arch/x86/crypto/blowfish_glue.c 		u8 *wsrc = walk.src.virt.addr;
walk               86 arch/x86/crypto/blowfish_glue.c 		u8 *wdst = walk.dst.virt.addr;
walk              112 arch/x86/crypto/blowfish_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              129 arch/x86/crypto/blowfish_glue.c 				  struct skcipher_walk *walk)
walk              132 arch/x86/crypto/blowfish_glue.c 	unsigned int nbytes = walk->nbytes;
walk              133 arch/x86/crypto/blowfish_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              134 arch/x86/crypto/blowfish_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              135 arch/x86/crypto/blowfish_glue.c 	u64 *iv = (u64 *)walk->iv;
walk              147 arch/x86/crypto/blowfish_glue.c 	*(u64 *)walk->iv = *iv;
walk              155 arch/x86/crypto/blowfish_glue.c 	struct skcipher_walk walk;
walk              159 arch/x86/crypto/blowfish_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              161 arch/x86/crypto/blowfish_glue.c 	while ((nbytes = walk.nbytes)) {
walk              162 arch/x86/crypto/blowfish_glue.c 		nbytes = __cbc_encrypt(ctx, &walk);
walk              163 arch/x86/crypto/blowfish_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              170 arch/x86/crypto/blowfish_glue.c 				  struct skcipher_walk *walk)
walk              173 arch/x86/crypto/blowfish_glue.c 	unsigned int nbytes = walk->nbytes;
walk              174 arch/x86/crypto/blowfish_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              175 arch/x86/crypto/blowfish_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              226 arch/x86/crypto/blowfish_glue.c 	*dst ^= *(u64 *)walk->iv;
walk              227 arch/x86/crypto/blowfish_glue.c 	*(u64 *)walk->iv = last_iv;
walk              236 arch/x86/crypto/blowfish_glue.c 	struct skcipher_walk walk;
walk              240 arch/x86/crypto/blowfish_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              242 arch/x86/crypto/blowfish_glue.c 	while ((nbytes = walk.nbytes)) {
walk              243 arch/x86/crypto/blowfish_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
walk              244 arch/x86/crypto/blowfish_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              250 arch/x86/crypto/blowfish_glue.c static void ctr_crypt_final(struct bf_ctx *ctx, struct skcipher_walk *walk)
walk              252 arch/x86/crypto/blowfish_glue.c 	u8 *ctrblk = walk->iv;
walk              254 arch/x86/crypto/blowfish_glue.c 	u8 *src = walk->src.virt.addr;
walk              255 arch/x86/crypto/blowfish_glue.c 	u8 *dst = walk->dst.virt.addr;
walk              256 arch/x86/crypto/blowfish_glue.c 	unsigned int nbytes = walk->nbytes;
walk              264 arch/x86/crypto/blowfish_glue.c static unsigned int __ctr_crypt(struct bf_ctx *ctx, struct skcipher_walk *walk)
walk              267 arch/x86/crypto/blowfish_glue.c 	unsigned int nbytes = walk->nbytes;
walk              268 arch/x86/crypto/blowfish_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              269 arch/x86/crypto/blowfish_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              270 arch/x86/crypto/blowfish_glue.c 	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
walk              314 arch/x86/crypto/blowfish_glue.c 	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
walk              322 arch/x86/crypto/blowfish_glue.c 	struct skcipher_walk walk;
walk              326 arch/x86/crypto/blowfish_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              328 arch/x86/crypto/blowfish_glue.c 	while ((nbytes = walk.nbytes) >= BF_BLOCK_SIZE) {
walk              329 arch/x86/crypto/blowfish_glue.c 		nbytes = __ctr_crypt(ctx, &walk);
walk              330 arch/x86/crypto/blowfish_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              334 arch/x86/crypto/blowfish_glue.c 		ctr_crypt_final(ctx, &walk);
walk              335 arch/x86/crypto/blowfish_glue.c 		err = skcipher_walk_done(&walk, 0);
walk               35 arch/x86/crypto/cast5_avx_glue.c static inline bool cast5_fpu_begin(bool fpu_enabled, struct skcipher_walk *walk,
walk               39 arch/x86/crypto/cast5_avx_glue.c 			      walk, fpu_enabled, nbytes);
walk               52 arch/x86/crypto/cast5_avx_glue.c 	struct skcipher_walk walk;
walk               58 arch/x86/crypto/cast5_avx_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               60 arch/x86/crypto/cast5_avx_glue.c 	while ((nbytes = walk.nbytes)) {
walk               61 arch/x86/crypto/cast5_avx_glue.c 		u8 *wsrc = walk.src.virt.addr;
walk               62 arch/x86/crypto/cast5_avx_glue.c 		u8 *wdst = walk.dst.virt.addr;
walk               64 arch/x86/crypto/cast5_avx_glue.c 		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
walk               93 arch/x86/crypto/cast5_avx_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              115 arch/x86/crypto/cast5_avx_glue.c 	struct skcipher_walk walk;
walk              119 arch/x86/crypto/cast5_avx_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              121 arch/x86/crypto/cast5_avx_glue.c 	while ((nbytes = walk.nbytes)) {
walk              122 arch/x86/crypto/cast5_avx_glue.c 		u64 *src = (u64 *)walk.src.virt.addr;
walk              123 arch/x86/crypto/cast5_avx_glue.c 		u64 *dst = (u64 *)walk.dst.virt.addr;
walk              124 arch/x86/crypto/cast5_avx_glue.c 		u64 *iv = (u64 *)walk.iv;
walk              135 arch/x86/crypto/cast5_avx_glue.c 		*(u64 *)walk.iv = *iv;
walk              136 arch/x86/crypto/cast5_avx_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              143 arch/x86/crypto/cast5_avx_glue.c 				  struct skcipher_walk *walk)
walk              146 arch/x86/crypto/cast5_avx_glue.c 	unsigned int nbytes = walk->nbytes;
walk              147 arch/x86/crypto/cast5_avx_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              148 arch/x86/crypto/cast5_avx_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              190 arch/x86/crypto/cast5_avx_glue.c 	*dst ^= *(u64 *)walk->iv;
walk              191 arch/x86/crypto/cast5_avx_glue.c 	*(u64 *)walk->iv = last_iv;
walk              201 arch/x86/crypto/cast5_avx_glue.c 	struct skcipher_walk walk;
walk              205 arch/x86/crypto/cast5_avx_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              207 arch/x86/crypto/cast5_avx_glue.c 	while ((nbytes = walk.nbytes)) {
walk              208 arch/x86/crypto/cast5_avx_glue.c 		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
walk              209 arch/x86/crypto/cast5_avx_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
walk              210 arch/x86/crypto/cast5_avx_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              217 arch/x86/crypto/cast5_avx_glue.c static void ctr_crypt_final(struct skcipher_walk *walk, struct cast5_ctx *ctx)
walk              219 arch/x86/crypto/cast5_avx_glue.c 	u8 *ctrblk = walk->iv;
walk              221 arch/x86/crypto/cast5_avx_glue.c 	u8 *src = walk->src.virt.addr;
walk              222 arch/x86/crypto/cast5_avx_glue.c 	u8 *dst = walk->dst.virt.addr;
walk              223 arch/x86/crypto/cast5_avx_glue.c 	unsigned int nbytes = walk->nbytes;
walk              231 arch/x86/crypto/cast5_avx_glue.c static unsigned int __ctr_crypt(struct skcipher_walk *walk,
walk              235 arch/x86/crypto/cast5_avx_glue.c 	unsigned int nbytes = walk->nbytes;
walk              236 arch/x86/crypto/cast5_avx_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              237 arch/x86/crypto/cast5_avx_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              243 arch/x86/crypto/cast5_avx_glue.c 					(__be64 *)walk->iv);
walk              261 arch/x86/crypto/cast5_avx_glue.c 		ctrblk = *(u64 *)walk->iv;
walk              262 arch/x86/crypto/cast5_avx_glue.c 		be64_add_cpu((__be64 *)walk->iv, 1);
walk              281 arch/x86/crypto/cast5_avx_glue.c 	struct skcipher_walk walk;
walk              285 arch/x86/crypto/cast5_avx_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              287 arch/x86/crypto/cast5_avx_glue.c 	while ((nbytes = walk.nbytes) >= CAST5_BLOCK_SIZE) {
walk              288 arch/x86/crypto/cast5_avx_glue.c 		fpu_enabled = cast5_fpu_begin(fpu_enabled, &walk, nbytes);
walk              289 arch/x86/crypto/cast5_avx_glue.c 		nbytes = __ctr_crypt(&walk, ctx);
walk              290 arch/x86/crypto/cast5_avx_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              295 arch/x86/crypto/cast5_avx_glue.c 	if (walk.nbytes) {
walk              296 arch/x86/crypto/cast5_avx_glue.c 		ctr_crypt_final(&walk, ctx);
walk              297 arch/x86/crypto/cast5_avx_glue.c 		err = skcipher_walk_done(&walk, 0);
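The cast5_fpu_begin() references above defer taking the FPU until a walk chunk is large enough to amortize the save/restore cost; small chunks fall back to the scalar code path, and the caller issues a single kernel_fpu_end() after the loop. A minimal sketch of that gate, assuming a hypothetical MY_FPU_BYTES_LIMIT threshold:

static bool my_fpu_begin(bool fpu_enabled, unsigned int nbytes)
{
	if (fpu_enabled)
		return true;		/* already inside an FPU section */
	if (nbytes < MY_FPU_BYTES_LIMIT)
		return false;		/* not worth kernel_fpu_begin() */
	kernel_fpu_begin();
	return true;
}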
walk              126 arch/x86/crypto/chacha_glue.c static int chacha_simd_stream_xor(struct skcipher_walk *walk,
walk              138 arch/x86/crypto/chacha_glue.c 	while (walk->nbytes > 0) {
walk              139 arch/x86/crypto/chacha_glue.c 		unsigned int nbytes = walk->nbytes;
walk              141 arch/x86/crypto/chacha_glue.c 		if (nbytes < walk->total) {
walk              142 arch/x86/crypto/chacha_glue.c 			nbytes = round_down(nbytes, walk->stride);
walk              146 arch/x86/crypto/chacha_glue.c 		chacha_dosimd(state, walk->dst.virt.addr, walk->src.virt.addr,
walk              156 arch/x86/crypto/chacha_glue.c 		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
walk              166 arch/x86/crypto/chacha_glue.c 	struct skcipher_walk walk;
walk              172 arch/x86/crypto/chacha_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              177 arch/x86/crypto/chacha_glue.c 	err = chacha_simd_stream_xor(&walk, ctx, req->iv);
walk              186 arch/x86/crypto/chacha_glue.c 	struct skcipher_walk walk;
walk              195 arch/x86/crypto/chacha_glue.c 	err = skcipher_walk_virt(&walk, req, true);
walk              210 arch/x86/crypto/chacha_glue.c 	err = chacha_simd_stream_xor(&walk, &subctx, real_iv);
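The chacha_glue.c references above show the stream-cipher variant of the skcipher walk: every chunk except the last is rounded down to walk.stride so the keystream stays block-aligned across chunks. A minimal sketch, assuming a hypothetical my_stream_xor() keystream primitive:

static int my_stream_crypt(struct skcipher_walk *walk, void *state)
{
	int err = 0;

	while (walk->nbytes > 0) {
		unsigned int nbytes = walk->nbytes;

		/* only the final chunk may be a partial stride */
		if (nbytes < walk->total)
			nbytes = round_down(nbytes, walk->stride);

		my_stream_xor(state, walk->dst.virt.addr,
			      walk->src.virt.addr, nbytes);
		err = skcipher_walk_done(walk, walk->nbytes - nbytes);
	}
	return err;
}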
walk               79 arch/x86/crypto/des3_ede_glue.c 	struct skcipher_walk walk;
walk               83 arch/x86/crypto/des3_ede_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk               85 arch/x86/crypto/des3_ede_glue.c 	while ((nbytes = walk.nbytes)) {
walk               86 arch/x86/crypto/des3_ede_glue.c 		u8 *wsrc = walk.src.virt.addr;
walk               87 arch/x86/crypto/des3_ede_glue.c 		u8 *wdst = walk.dst.virt.addr;
walk              114 arch/x86/crypto/des3_ede_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              137 arch/x86/crypto/des3_ede_glue.c 				  struct skcipher_walk *walk)
walk              140 arch/x86/crypto/des3_ede_glue.c 	unsigned int nbytes = walk->nbytes;
walk              141 arch/x86/crypto/des3_ede_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              142 arch/x86/crypto/des3_ede_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              143 arch/x86/crypto/des3_ede_glue.c 	u64 *iv = (u64 *)walk->iv;
walk              155 arch/x86/crypto/des3_ede_glue.c 	*(u64 *)walk->iv = *iv;
walk              163 arch/x86/crypto/des3_ede_glue.c 	struct skcipher_walk walk;
walk              167 arch/x86/crypto/des3_ede_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              169 arch/x86/crypto/des3_ede_glue.c 	while ((nbytes = walk.nbytes)) {
walk              170 arch/x86/crypto/des3_ede_glue.c 		nbytes = __cbc_encrypt(ctx, &walk);
walk              171 arch/x86/crypto/des3_ede_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              178 arch/x86/crypto/des3_ede_glue.c 				  struct skcipher_walk *walk)
walk              181 arch/x86/crypto/des3_ede_glue.c 	unsigned int nbytes = walk->nbytes;
walk              182 arch/x86/crypto/des3_ede_glue.c 	u64 *src = (u64 *)walk->src.virt.addr;
walk              183 arch/x86/crypto/des3_ede_glue.c 	u64 *dst = (u64 *)walk->dst.virt.addr;
walk              232 arch/x86/crypto/des3_ede_glue.c 	*dst ^= *(u64 *)walk->iv;
walk              233 arch/x86/crypto/des3_ede_glue.c 	*(u64 *)walk->iv = last_iv;
walk              242 arch/x86/crypto/des3_ede_glue.c 	struct skcipher_walk walk;
walk              246 arch/x86/crypto/des3_ede_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              248 arch/x86/crypto/des3_ede_glue.c 	while ((nbytes = walk.nbytes)) {
walk              249 arch/x86/crypto/des3_ede_glue.c 		nbytes = __cbc_decrypt(ctx, &walk);
walk              250 arch/x86/crypto/des3_ede_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              257 arch/x86/crypto/des3_ede_glue.c 			    struct skcipher_walk *walk)
walk              259 arch/x86/crypto/des3_ede_glue.c 	u8 *ctrblk = walk->iv;
walk              261 arch/x86/crypto/des3_ede_glue.c 	u8 *src = walk->src.virt.addr;
walk              262 arch/x86/crypto/des3_ede_glue.c 	u8 *dst = walk->dst.virt.addr;
walk              263 arch/x86/crypto/des3_ede_glue.c 	unsigned int nbytes = walk->nbytes;
walk              272 arch/x86/crypto/des3_ede_glue.c 				struct skcipher_walk *walk)
walk              275 arch/x86/crypto/des3_ede_glue.c 	unsigned int nbytes = walk->nbytes;
walk              276 arch/x86/crypto/des3_ede_glue.c 	__be64 *src = (__be64 *)walk->src.virt.addr;
walk              277 arch/x86/crypto/des3_ede_glue.c 	__be64 *dst = (__be64 *)walk->dst.virt.addr;
walk              278 arch/x86/crypto/des3_ede_glue.c 	u64 ctrblk = be64_to_cpu(*(__be64 *)walk->iv);
walk              317 arch/x86/crypto/des3_ede_glue.c 	*(__be64 *)walk->iv = cpu_to_be64(ctrblk);
walk              325 arch/x86/crypto/des3_ede_glue.c 	struct skcipher_walk walk;
walk              329 arch/x86/crypto/des3_ede_glue.c 	err = skcipher_walk_virt(&walk, req, false);
walk              331 arch/x86/crypto/des3_ede_glue.c 	while ((nbytes = walk.nbytes) >= DES3_EDE_BLOCK_SIZE) {
walk              332 arch/x86/crypto/des3_ede_glue.c 		nbytes = __ctr_crypt(ctx, &walk);
walk              333 arch/x86/crypto/des3_ede_glue.c 		err = skcipher_walk_done(&walk, nbytes);
walk              337 arch/x86/crypto/des3_ede_glue.c 		ctr_crypt_final(ctx, &walk);
walk              338 arch/x86/crypto/des3_ede_glue.c 		err = skcipher_walk_done(&walk, 0);
walk               26 arch/x86/crypto/glue_helper.c 	struct skcipher_walk walk;
walk               31 arch/x86/crypto/glue_helper.c 	err = skcipher_walk_virt(&walk, req, false);
walk               33 arch/x86/crypto/glue_helper.c 	while ((nbytes = walk.nbytes)) {
walk               34 arch/x86/crypto/glue_helper.c 		const u8 *src = walk.src.virt.addr;
walk               35 arch/x86/crypto/glue_helper.c 		u8 *dst = walk.dst.virt.addr;
walk               40 arch/x86/crypto/glue_helper.c 					     &walk, fpu_enabled, nbytes);
walk               58 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, nbytes);
walk               71 arch/x86/crypto/glue_helper.c 	struct skcipher_walk walk;
walk               75 arch/x86/crypto/glue_helper.c 	err = skcipher_walk_virt(&walk, req, false);
walk               77 arch/x86/crypto/glue_helper.c 	while ((nbytes = walk.nbytes)) {
walk               78 arch/x86/crypto/glue_helper.c 		const u128 *src = (u128 *)walk.src.virt.addr;
walk               79 arch/x86/crypto/glue_helper.c 		u128 *dst = (u128 *)walk.dst.virt.addr;
walk               80 arch/x86/crypto/glue_helper.c 		u128 *iv = (u128 *)walk.iv;
walk               91 arch/x86/crypto/glue_helper.c 		*(u128 *)walk.iv = *iv;
walk               92 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, nbytes);
walk              103 arch/x86/crypto/glue_helper.c 	struct skcipher_walk walk;
walk              108 arch/x86/crypto/glue_helper.c 	err = skcipher_walk_virt(&walk, req, false);
walk              110 arch/x86/crypto/glue_helper.c 	while ((nbytes = walk.nbytes)) {
walk              111 arch/x86/crypto/glue_helper.c 		const u128 *src = walk.src.virt.addr;
walk              112 arch/x86/crypto/glue_helper.c 		u128 *dst = walk.dst.virt.addr;
walk              118 arch/x86/crypto/glue_helper.c 					     &walk, fpu_enabled, nbytes);
walk              148 arch/x86/crypto/glue_helper.c 		u128_xor(dst, dst, (u128 *)walk.iv);
walk              149 arch/x86/crypto/glue_helper.c 		*(u128 *)walk.iv = last_iv;
walk              150 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, nbytes);
walk              163 arch/x86/crypto/glue_helper.c 	struct skcipher_walk walk;
walk              168 arch/x86/crypto/glue_helper.c 	err = skcipher_walk_virt(&walk, req, false);
walk              170 arch/x86/crypto/glue_helper.c 	while ((nbytes = walk.nbytes) >= bsize) {
walk              171 arch/x86/crypto/glue_helper.c 		const u128 *src = walk.src.virt.addr;
walk              172 arch/x86/crypto/glue_helper.c 		u128 *dst = walk.dst.virt.addr;
walk              178 arch/x86/crypto/glue_helper.c 					     &walk, fpu_enabled, nbytes);
walk              180 arch/x86/crypto/glue_helper.c 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
walk              201 arch/x86/crypto/glue_helper.c 		le128_to_be128((be128 *)walk.iv, &ctrblk);
walk              202 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, nbytes);
walk              211 arch/x86/crypto/glue_helper.c 		be128_to_le128(&ctrblk, (be128 *)walk.iv);
walk              212 arch/x86/crypto/glue_helper.c 		memcpy(&tmp, walk.src.virt.addr, nbytes);
walk              215 arch/x86/crypto/glue_helper.c 		memcpy(walk.dst.virt.addr, &tmp, nbytes);
walk              216 arch/x86/crypto/glue_helper.c 		le128_to_be128((be128 *)walk.iv, &ctrblk);
walk              218 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, 0);
walk              227 arch/x86/crypto/glue_helper.c 					  struct skcipher_walk *walk)
walk              230 arch/x86/crypto/glue_helper.c 	unsigned int nbytes = walk->nbytes;
walk              231 arch/x86/crypto/glue_helper.c 	u128 *src = walk->src.virt.addr;
walk              232 arch/x86/crypto/glue_helper.c 	u128 *dst = walk->dst.virt.addr;
walk              244 arch/x86/crypto/glue_helper.c 							walk->iv);
walk              268 arch/x86/crypto/glue_helper.c 	struct skcipher_walk walk;
walk              290 arch/x86/crypto/glue_helper.c 	err = skcipher_walk_virt(&walk, req, false);
walk              291 arch/x86/crypto/glue_helper.c 	nbytes = walk.nbytes;
walk              297 arch/x86/crypto/glue_helper.c 				     &walk, fpu_enabled,
walk              301 arch/x86/crypto/glue_helper.c 	tweak_fn(tweak_ctx, walk.iv, walk.iv);
walk              304 arch/x86/crypto/glue_helper.c 		nbytes = __glue_xts_req_128bit(gctx, crypt_ctx, &walk);
walk              306 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_done(&walk, nbytes);
walk              307 arch/x86/crypto/glue_helper.c 		nbytes = walk.nbytes;
walk              330 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_virt(&walk, req, false) ?:
walk              331 arch/x86/crypto/glue_helper.c 		      skcipher_walk_done(&walk,
walk              332 arch/x86/crypto/glue_helper.c 				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
walk              345 arch/x86/crypto/glue_helper.c 		err = skcipher_walk_virt(&walk, req, false) ?:
walk              346 arch/x86/crypto/glue_helper.c 		      skcipher_walk_done(&walk,
walk              347 arch/x86/crypto/glue_helper.c 				__glue_xts_req_128bit(gctx, crypt_ctx, &walk));
walk               48 arch/x86/include/asm/crypto/glue_helper.h 				  struct skcipher_walk *walk,
walk               65 arch/x86/include/asm/crypto/glue_helper.h 	skcipher_walk_atomise(walk);
walk               40 crypto/ablkcipher.c void __ablkcipher_walk_complete(struct ablkcipher_walk *walk)
walk               44 crypto/ablkcipher.c 	list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
walk               52 crypto/ablkcipher.c static inline void ablkcipher_queue_write(struct ablkcipher_walk *walk,
walk               55 crypto/ablkcipher.c 	p->dst = walk->out;
walk               56 crypto/ablkcipher.c 	list_add_tail(&p->entry, &walk->buffers);
walk               69 crypto/ablkcipher.c static inline void ablkcipher_done_slow(struct ablkcipher_walk *walk,
walk               73 crypto/ablkcipher.c 		unsigned int len_this_page = scatterwalk_pagelen(&walk->out);
walk               77 crypto/ablkcipher.c 		scatterwalk_advance(&walk->out, n);
walk               81 crypto/ablkcipher.c 		scatterwalk_start(&walk->out, sg_next(walk->out.sg));
walk               85 crypto/ablkcipher.c static inline void ablkcipher_done_fast(struct ablkcipher_walk *walk,
walk               88 crypto/ablkcipher.c 	scatterwalk_advance(&walk->in, n);
walk               89 crypto/ablkcipher.c 	scatterwalk_advance(&walk->out, n);
walk               93 crypto/ablkcipher.c 				struct ablkcipher_walk *walk);
walk               96 crypto/ablkcipher.c 			 struct ablkcipher_walk *walk, int err)
walk              105 crypto/ablkcipher.c 	n = walk->nbytes - err;
walk              106 crypto/ablkcipher.c 	walk->total -= n;
walk              107 crypto/ablkcipher.c 	more = (walk->total != 0);
walk              109 crypto/ablkcipher.c 	if (likely(!(walk->flags & ABLKCIPHER_WALK_SLOW))) {
walk              110 crypto/ablkcipher.c 		ablkcipher_done_fast(walk, n);
walk              117 crypto/ablkcipher.c 		ablkcipher_done_slow(walk, n);
walk              120 crypto/ablkcipher.c 	scatterwalk_done(&walk->in, 0, more);
walk              121 crypto/ablkcipher.c 	scatterwalk_done(&walk->out, 1, more);
walk              125 crypto/ablkcipher.c 		return ablkcipher_walk_next(req, walk);
walk              129 crypto/ablkcipher.c 	walk->nbytes = 0;
walk              130 crypto/ablkcipher.c 	if (walk->iv != req->info)
walk              131 crypto/ablkcipher.c 		memcpy(req->info, walk->iv, tfm->crt_ablkcipher.ivsize);
walk              132 crypto/ablkcipher.c 	kfree(walk->iv_buffer);
walk              138 crypto/ablkcipher.c 				       struct ablkcipher_walk *walk,
walk              154 crypto/ablkcipher.c 		return ablkcipher_walk_done(req, walk, -ENOMEM);
walk              164 crypto/ablkcipher.c 	scatterwalk_copychunks(src, &walk->in, bsize, 0);
walk              166 crypto/ablkcipher.c 	ablkcipher_queue_write(walk, p);
walk              168 crypto/ablkcipher.c 	walk->nbytes = bsize;
walk              169 crypto/ablkcipher.c 	walk->flags |= ABLKCIPHER_WALK_SLOW;
walk              177 crypto/ablkcipher.c static inline int ablkcipher_copy_iv(struct ablkcipher_walk *walk,
walk              181 crypto/ablkcipher.c 	unsigned bs = walk->blocksize;
walk              189 crypto/ablkcipher.c 	walk->iv_buffer = kmalloc(size, GFP_ATOMIC);
walk              190 crypto/ablkcipher.c 	if (!walk->iv_buffer)
walk              193 crypto/ablkcipher.c 	iv = (u8 *)ALIGN((unsigned long)walk->iv_buffer, alignmask + 1);
walk              198 crypto/ablkcipher.c 	walk->iv = memcpy(iv, walk->iv, ivsize);
walk              203 crypto/ablkcipher.c 				       struct ablkcipher_walk *walk)
walk              205 crypto/ablkcipher.c 	walk->src.page = scatterwalk_page(&walk->in);
walk              206 crypto/ablkcipher.c 	walk->src.offset = offset_in_page(walk->in.offset);
walk              207 crypto/ablkcipher.c 	walk->dst.page = scatterwalk_page(&walk->out);
walk              208 crypto/ablkcipher.c 	walk->dst.offset = offset_in_page(walk->out.offset);
walk              214 crypto/ablkcipher.c 				struct ablkcipher_walk *walk)
walk              222 crypto/ablkcipher.c 	n = walk->total;
walk              225 crypto/ablkcipher.c 		return ablkcipher_walk_done(req, walk, -EINVAL);
walk              228 crypto/ablkcipher.c 	walk->flags &= ~ABLKCIPHER_WALK_SLOW;
walk              231 crypto/ablkcipher.c 	bsize = min(walk->blocksize, n);
walk              232 crypto/ablkcipher.c 	n = scatterwalk_clamp(&walk->in, n);
walk              233 crypto/ablkcipher.c 	n = scatterwalk_clamp(&walk->out, n);
walk              236 crypto/ablkcipher.c 	    !scatterwalk_aligned(&walk->in, alignmask) ||
walk              237 crypto/ablkcipher.c 	    !scatterwalk_aligned(&walk->out, alignmask)) {
walk              238 crypto/ablkcipher.c 		err = ablkcipher_next_slow(req, walk, bsize, alignmask,
walk              243 crypto/ablkcipher.c 	walk->nbytes = n;
walk              245 crypto/ablkcipher.c 	return ablkcipher_next_fast(req, walk);
walk              249 crypto/ablkcipher.c 		walk->src.page = virt_to_page(src);
walk              250 crypto/ablkcipher.c 		walk->dst.page = virt_to_page(dst);
walk              251 crypto/ablkcipher.c 		walk->src.offset = ((unsigned long)src & (PAGE_SIZE - 1));
walk              252 crypto/ablkcipher.c 		walk->dst.offset = ((unsigned long)dst & (PAGE_SIZE - 1));
walk              259 crypto/ablkcipher.c 				 struct ablkcipher_walk *walk)
walk              268 crypto/ablkcipher.c 	walk->iv = req->info;
walk              269 crypto/ablkcipher.c 	walk->nbytes = walk->total;
walk              270 crypto/ablkcipher.c 	if (unlikely(!walk->total))
walk              273 crypto/ablkcipher.c 	walk->iv_buffer = NULL;
walk              274 crypto/ablkcipher.c 	if (unlikely(((unsigned long)walk->iv & alignmask))) {
walk              275 crypto/ablkcipher.c 		int err = ablkcipher_copy_iv(walk, tfm, alignmask);
walk              281 crypto/ablkcipher.c 	scatterwalk_start(&walk->in, walk->in.sg);
walk              282 crypto/ablkcipher.c 	scatterwalk_start(&walk->out, walk->out.sg);
walk              284 crypto/ablkcipher.c 	return ablkcipher_walk_next(req, walk);
walk              288 crypto/ablkcipher.c 			 struct ablkcipher_walk *walk)
walk              290 crypto/ablkcipher.c 	walk->blocksize = crypto_tfm_alg_blocksize(req->base.tfm);
walk              291 crypto/ablkcipher.c 	return ablkcipher_walk_first(req, walk);
walk               39 crypto/aegis128-core.c 	int (*skcipher_walk_init)(struct skcipher_walk *walk,
walk              285 crypto/aegis128-core.c 	struct scatter_walk walk;
walk              289 crypto/aegis128-core.c 	scatterwalk_start(&walk, sg_src);
walk              291 crypto/aegis128-core.c 		unsigned int size = scatterwalk_clamp(&walk, assoclen);
walk              293 crypto/aegis128-core.c 		void *mapped = scatterwalk_map(&walk);
walk              316 crypto/aegis128-core.c 		scatterwalk_advance(&walk, size);
walk              317 crypto/aegis128-core.c 		scatterwalk_done(&walk, 0, assoclen);
walk              330 crypto/aegis128-core.c 	struct skcipher_walk walk;
walk              332 crypto/aegis128-core.c 	ops->skcipher_walk_init(&walk, req, false);
walk              334 crypto/aegis128-core.c 	while (walk.nbytes) {
walk              335 crypto/aegis128-core.c 		unsigned int nbytes = walk.nbytes;
walk              337 crypto/aegis128-core.c 		if (nbytes < walk.total)
walk              338 crypto/aegis128-core.c 			nbytes = round_down(nbytes, walk.stride);
walk              340 crypto/aegis128-core.c 		ops->crypt_chunk(state, walk.dst.virt.addr, walk.src.virt.addr,
walk              343 crypto/aegis128-core.c 		skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk               40 crypto/ahash.c static int hash_walk_next(struct crypto_hash_walk *walk)
walk               42 crypto/ahash.c 	unsigned int alignmask = walk->alignmask;
walk               43 crypto/ahash.c 	unsigned int offset = walk->offset;
walk               44 crypto/ahash.c 	unsigned int nbytes = min(walk->entrylen,
walk               47 crypto/ahash.c 	if (walk->flags & CRYPTO_ALG_ASYNC)
walk               48 crypto/ahash.c 		walk->data = kmap(walk->pg);
walk               50 crypto/ahash.c 		walk->data = kmap_atomic(walk->pg);
walk               51 crypto/ahash.c 	walk->data += offset;
walk               60 crypto/ahash.c 	walk->entrylen -= nbytes;
walk               64 crypto/ahash.c static int hash_walk_new_entry(struct crypto_hash_walk *walk)
walk               68 crypto/ahash.c 	sg = walk->sg;
walk               69 crypto/ahash.c 	walk->offset = sg->offset;
walk               70 crypto/ahash.c 	walk->pg = sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
walk               71 crypto/ahash.c 	walk->offset = offset_in_page(walk->offset);
walk               72 crypto/ahash.c 	walk->entrylen = sg->length;
walk               74 crypto/ahash.c 	if (walk->entrylen > walk->total)
walk               75 crypto/ahash.c 		walk->entrylen = walk->total;
walk               76 crypto/ahash.c 	walk->total -= walk->entrylen;
walk               78 crypto/ahash.c 	return hash_walk_next(walk);
walk               81 crypto/ahash.c int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err)
walk               83 crypto/ahash.c 	unsigned int alignmask = walk->alignmask;
walk               85 crypto/ahash.c 	walk->data -= walk->offset;
walk               87 crypto/ahash.c 	if (walk->entrylen && (walk->offset & alignmask) && !err) {
walk               90 crypto/ahash.c 		walk->offset = ALIGN(walk->offset, alignmask + 1);
walk               91 crypto/ahash.c 		nbytes = min(walk->entrylen,
walk               92 crypto/ahash.c 			     (unsigned int)(PAGE_SIZE - walk->offset));
walk               94 crypto/ahash.c 			walk->entrylen -= nbytes;
walk               95 crypto/ahash.c 			walk->data += walk->offset;
walk              100 crypto/ahash.c 	if (walk->flags & CRYPTO_ALG_ASYNC)
walk              101 crypto/ahash.c 		kunmap(walk->pg);
walk              103 crypto/ahash.c 		kunmap_atomic(walk->data);
walk              108 crypto/ahash.c 		crypto_yield(walk->flags);
walk              114 crypto/ahash.c 	if (walk->entrylen) {
walk              115 crypto/ahash.c 		walk->offset = 0;
walk              116 crypto/ahash.c 		walk->pg++;
walk              117 crypto/ahash.c 		return hash_walk_next(walk);
walk              120 crypto/ahash.c 	if (!walk->total)
walk              123 crypto/ahash.c 	walk->sg = sg_next(walk->sg);
walk              125 crypto/ahash.c 	return hash_walk_new_entry(walk);
walk              130 crypto/ahash.c 			   struct crypto_hash_walk *walk)
walk              132 crypto/ahash.c 	walk->total = req->nbytes;
walk              134 crypto/ahash.c 	if (!walk->total) {
walk              135 crypto/ahash.c 		walk->entrylen = 0;
walk              139 crypto/ahash.c 	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
walk              140 crypto/ahash.c 	walk->sg = req->src;
walk              141 crypto/ahash.c 	walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK;
walk              143 crypto/ahash.c 	return hash_walk_new_entry(walk);
walk              148 crypto/ahash.c 			    struct crypto_hash_walk *walk)
walk              150 crypto/ahash.c 	walk->total = req->nbytes;
walk              152 crypto/ahash.c 	if (!walk->total) {
walk              153 crypto/ahash.c 		walk->entrylen = 0;
walk              157 crypto/ahash.c 	walk->alignmask = crypto_ahash_alignmask(crypto_ahash_reqtfm(req));
walk              158 crypto/ahash.c 	walk->sg = req->src;
walk              159 crypto/ahash.c 	walk->flags = req->base.flags & CRYPTO_TFM_REQ_MASK;
walk              160 crypto/ahash.c 	walk->flags |= CRYPTO_ALG_ASYNC;
walk              164 crypto/ahash.c 	return hash_walk_new_entry(walk);
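The crypto/ahash.c references above implement the hash walk that drivers consume: crypto_hash_walk_first()/crypto_hash_walk_done() hand back one mapped, alignment-adjusted piece of the request at a time. A minimal consumer sketch mirroring the shash_ahash_update() pattern, where my_hash_update() stands in for the per-driver update step and returns 0 on success:

static int my_ahash_update(struct ahash_request *req, void *state)
{
	struct crypto_hash_walk walk;
	int nbytes;

	/* a positive return is the length of the next mapped piece;
	 * zero or negative ends the walk and is returned to the caller */
	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, nbytes))
		nbytes = my_hash_update(state, walk.data, nbytes);

	return nbytes;
}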
walk               28 crypto/arc4.c  	struct skcipher_walk walk;
walk               31 crypto/arc4.c  	err = skcipher_walk_virt(&walk, req, false);
walk               33 crypto/arc4.c  	while (walk.nbytes > 0) {
walk               34 crypto/arc4.c  		arc4_crypt(ctx, walk.dst.virt.addr, walk.src.virt.addr,
walk               35 crypto/arc4.c  			   walk.nbytes);
walk               36 crypto/arc4.c  		err = skcipher_walk_done(&walk, 0);
walk               35 crypto/blkcipher.c 			       struct blkcipher_walk *walk);
walk               37 crypto/blkcipher.c 				struct blkcipher_walk *walk);
walk               39 crypto/blkcipher.c static inline void blkcipher_map_src(struct blkcipher_walk *walk)
walk               41 crypto/blkcipher.c 	walk->src.virt.addr = scatterwalk_map(&walk->in);
walk               44 crypto/blkcipher.c static inline void blkcipher_map_dst(struct blkcipher_walk *walk)
walk               46 crypto/blkcipher.c 	walk->dst.virt.addr = scatterwalk_map(&walk->out);
walk               49 crypto/blkcipher.c static inline void blkcipher_unmap_src(struct blkcipher_walk *walk)
walk               51 crypto/blkcipher.c 	scatterwalk_unmap(walk->src.virt.addr);
walk               54 crypto/blkcipher.c static inline void blkcipher_unmap_dst(struct blkcipher_walk *walk)
walk               56 crypto/blkcipher.c 	scatterwalk_unmap(walk->dst.virt.addr);
walk               68 crypto/blkcipher.c static inline void blkcipher_done_slow(struct blkcipher_walk *walk,
walk               73 crypto/blkcipher.c 	addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
walk               75 crypto/blkcipher.c 	scatterwalk_copychunks(addr, &walk->out, bsize, 1);
walk               78 crypto/blkcipher.c static inline void blkcipher_done_fast(struct blkcipher_walk *walk,
walk               81 crypto/blkcipher.c 	if (walk->flags & BLKCIPHER_WALK_COPY) {
walk               82 crypto/blkcipher.c 		blkcipher_map_dst(walk);
walk               83 crypto/blkcipher.c 		memcpy(walk->dst.virt.addr, walk->page, n);
walk               84 crypto/blkcipher.c 		blkcipher_unmap_dst(walk);
walk               85 crypto/blkcipher.c 	} else if (!(walk->flags & BLKCIPHER_WALK_PHYS)) {
walk               86 crypto/blkcipher.c 		if (walk->flags & BLKCIPHER_WALK_DIFF)
walk               87 crypto/blkcipher.c 			blkcipher_unmap_dst(walk);
walk               88 crypto/blkcipher.c 		blkcipher_unmap_src(walk);
walk               91 crypto/blkcipher.c 	scatterwalk_advance(&walk->in, n);
walk               92 crypto/blkcipher.c 	scatterwalk_advance(&walk->out, n);
walk               96 crypto/blkcipher.c 			struct blkcipher_walk *walk, int err)
walk              104 crypto/blkcipher.c 	n = walk->nbytes - err;
walk              105 crypto/blkcipher.c 	walk->total -= n;
walk              106 crypto/blkcipher.c 	more = (walk->total != 0);
walk              108 crypto/blkcipher.c 	if (likely(!(walk->flags & BLKCIPHER_WALK_SLOW))) {
walk              109 crypto/blkcipher.c 		blkcipher_done_fast(walk, n);
walk              116 crypto/blkcipher.c 		blkcipher_done_slow(walk, n);
walk              119 crypto/blkcipher.c 	scatterwalk_done(&walk->in, 0, more);
walk              120 crypto/blkcipher.c 	scatterwalk_done(&walk->out, 1, more);
walk              124 crypto/blkcipher.c 		return blkcipher_walk_next(desc, walk);
walk              128 crypto/blkcipher.c 	walk->nbytes = 0;
walk              129 crypto/blkcipher.c 	if (walk->iv != desc->info)
walk              130 crypto/blkcipher.c 		memcpy(desc->info, walk->iv, walk->ivsize);
walk              131 crypto/blkcipher.c 	if (walk->buffer != walk->page)
walk              132 crypto/blkcipher.c 		kfree(walk->buffer);
walk              133 crypto/blkcipher.c 	if (walk->page)
walk              134 crypto/blkcipher.c 		free_page((unsigned long)walk->page);
walk              140 crypto/blkcipher.c 				      struct blkcipher_walk *walk,
walk              147 crypto/blkcipher.c 	if (walk->buffer)
walk              150 crypto/blkcipher.c 	walk->buffer = walk->page;
walk              151 crypto/blkcipher.c 	if (walk->buffer)
walk              156 crypto/blkcipher.c 	walk->buffer = kmalloc(n, GFP_ATOMIC);
walk              157 crypto/blkcipher.c 	if (!walk->buffer)
walk              158 crypto/blkcipher.c 		return blkcipher_walk_done(desc, walk, -ENOMEM);
walk              161 crypto/blkcipher.c 	walk->dst.virt.addr = (u8 *)ALIGN((unsigned long)walk->buffer,
walk              163 crypto/blkcipher.c 	walk->dst.virt.addr = blkcipher_get_spot(walk->dst.virt.addr, bsize);
walk              164 crypto/blkcipher.c 	walk->src.virt.addr = blkcipher_get_spot(walk->dst.virt.addr +
walk              167 crypto/blkcipher.c 	scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
walk              169 crypto/blkcipher.c 	walk->nbytes = bsize;
walk              170 crypto/blkcipher.c 	walk->flags |= BLKCIPHER_WALK_SLOW;
walk              175 crypto/blkcipher.c static inline int blkcipher_next_copy(struct blkcipher_walk *walk)
walk              177 crypto/blkcipher.c 	u8 *tmp = walk->page;
walk              179 crypto/blkcipher.c 	blkcipher_map_src(walk);
walk              180 crypto/blkcipher.c 	memcpy(tmp, walk->src.virt.addr, walk->nbytes);
walk              181 crypto/blkcipher.c 	blkcipher_unmap_src(walk);
walk              183 crypto/blkcipher.c 	walk->src.virt.addr = tmp;
walk              184 crypto/blkcipher.c 	walk->dst.virt.addr = tmp;
walk              190 crypto/blkcipher.c 				      struct blkcipher_walk *walk)
walk              194 crypto/blkcipher.c 	walk->src.phys.page = scatterwalk_page(&walk->in);
walk              195 crypto/blkcipher.c 	walk->src.phys.offset = offset_in_page(walk->in.offset);
walk              196 crypto/blkcipher.c 	walk->dst.phys.page = scatterwalk_page(&walk->out);
walk              197 crypto/blkcipher.c 	walk->dst.phys.offset = offset_in_page(walk->out.offset);
walk              199 crypto/blkcipher.c 	if (walk->flags & BLKCIPHER_WALK_PHYS)
walk              202 crypto/blkcipher.c 	diff = walk->src.phys.offset - walk->dst.phys.offset;
walk              203 crypto/blkcipher.c 	diff |= walk->src.virt.page - walk->dst.virt.page;
walk              205 crypto/blkcipher.c 	blkcipher_map_src(walk);
walk              206 crypto/blkcipher.c 	walk->dst.virt.addr = walk->src.virt.addr;
walk              209 crypto/blkcipher.c 		walk->flags |= BLKCIPHER_WALK_DIFF;
walk              210 crypto/blkcipher.c 		blkcipher_map_dst(walk);
walk              217 crypto/blkcipher.c 			       struct blkcipher_walk *walk)
walk              223 crypto/blkcipher.c 	n = walk->total;
walk              224 crypto/blkcipher.c 	if (unlikely(n < walk->cipher_blocksize)) {
walk              226 crypto/blkcipher.c 		return blkcipher_walk_done(desc, walk, -EINVAL);
walk              229 crypto/blkcipher.c 	bsize = min(walk->walk_blocksize, n);
walk              231 crypto/blkcipher.c 	walk->flags &= ~(BLKCIPHER_WALK_SLOW | BLKCIPHER_WALK_COPY |
walk              233 crypto/blkcipher.c 	if (!scatterwalk_aligned(&walk->in, walk->alignmask) ||
walk              234 crypto/blkcipher.c 	    !scatterwalk_aligned(&walk->out, walk->alignmask)) {
walk              235 crypto/blkcipher.c 		walk->flags |= BLKCIPHER_WALK_COPY;
walk              236 crypto/blkcipher.c 		if (!walk->page) {
walk              237 crypto/blkcipher.c 			walk->page = (void *)__get_free_page(GFP_ATOMIC);
walk              238 crypto/blkcipher.c 			if (!walk->page)
walk              243 crypto/blkcipher.c 	n = scatterwalk_clamp(&walk->in, n);
walk              244 crypto/blkcipher.c 	n = scatterwalk_clamp(&walk->out, n);
walk              247 crypto/blkcipher.c 		err = blkcipher_next_slow(desc, walk, bsize, walk->alignmask);
walk              251 crypto/blkcipher.c 	walk->nbytes = n;
walk              252 crypto/blkcipher.c 	if (walk->flags & BLKCIPHER_WALK_COPY) {
walk              253 crypto/blkcipher.c 		err = blkcipher_next_copy(walk);
walk              257 crypto/blkcipher.c 	return blkcipher_next_fast(desc, walk);
walk              260 crypto/blkcipher.c 	if (walk->flags & BLKCIPHER_WALK_PHYS) {
walk              261 crypto/blkcipher.c 		walk->src.phys.page = virt_to_page(walk->src.virt.addr);
walk              262 crypto/blkcipher.c 		walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
walk              263 crypto/blkcipher.c 		walk->src.phys.offset &= PAGE_SIZE - 1;
walk              264 crypto/blkcipher.c 		walk->dst.phys.offset &= PAGE_SIZE - 1;
walk              269 crypto/blkcipher.c static inline int blkcipher_copy_iv(struct blkcipher_walk *walk)
walk              271 crypto/blkcipher.c 	unsigned bs = walk->walk_blocksize;
walk              272 crypto/blkcipher.c 	unsigned aligned_bs = ALIGN(bs, walk->alignmask + 1);
walk              274 crypto/blkcipher.c 			    walk->ivsize + max(aligned_bs, walk->ivsize) -
walk              275 crypto/blkcipher.c 			    (walk->alignmask + 1);
walk              278 crypto/blkcipher.c 	size += walk->alignmask & ~(crypto_tfm_ctx_alignment() - 1);
walk              279 crypto/blkcipher.c 	walk->buffer = kmalloc(size, GFP_ATOMIC);
walk              280 crypto/blkcipher.c 	if (!walk->buffer)
walk              283 crypto/blkcipher.c 	iv = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
walk              286 crypto/blkcipher.c 	iv = blkcipher_get_spot(iv, walk->ivsize);
walk              288 crypto/blkcipher.c 	walk->iv = memcpy(iv, walk->iv, walk->ivsize);
walk              293 crypto/blkcipher.c 			struct blkcipher_walk *walk)
walk              295 crypto/blkcipher.c 	walk->flags &= ~BLKCIPHER_WALK_PHYS;
walk              296 crypto/blkcipher.c 	walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
walk              297 crypto/blkcipher.c 	walk->cipher_blocksize = walk->walk_blocksize;
walk              298 crypto/blkcipher.c 	walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
walk              299 crypto/blkcipher.c 	walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
walk              300 crypto/blkcipher.c 	return blkcipher_walk_first(desc, walk);
walk              305 crypto/blkcipher.c 			struct blkcipher_walk *walk)
walk              307 crypto/blkcipher.c 	walk->flags |= BLKCIPHER_WALK_PHYS;
walk              308 crypto/blkcipher.c 	walk->walk_blocksize = crypto_blkcipher_blocksize(desc->tfm);
walk              309 crypto/blkcipher.c 	walk->cipher_blocksize = walk->walk_blocksize;
walk              310 crypto/blkcipher.c 	walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
walk              311 crypto/blkcipher.c 	walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
walk              312 crypto/blkcipher.c 	return blkcipher_walk_first(desc, walk);
walk              317 crypto/blkcipher.c 				struct blkcipher_walk *walk)
walk              322 crypto/blkcipher.c 	walk->iv = desc->info;
walk              323 crypto/blkcipher.c 	walk->nbytes = walk->total;
walk              324 crypto/blkcipher.c 	if (unlikely(!walk->total))
walk              327 crypto/blkcipher.c 	walk->buffer = NULL;
walk              328 crypto/blkcipher.c 	if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
walk              329 crypto/blkcipher.c 		int err = blkcipher_copy_iv(walk);
walk              334 crypto/blkcipher.c 	scatterwalk_start(&walk->in, walk->in.sg);
walk              335 crypto/blkcipher.c 	scatterwalk_start(&walk->out, walk->out.sg);
walk              336 crypto/blkcipher.c 	walk->page = NULL;
walk              338 crypto/blkcipher.c 	return blkcipher_walk_next(desc, walk);
walk              342 crypto/blkcipher.c 			      struct blkcipher_walk *walk,
walk              345 crypto/blkcipher.c 	walk->flags &= ~BLKCIPHER_WALK_PHYS;
walk              346 crypto/blkcipher.c 	walk->walk_blocksize = blocksize;
walk              347 crypto/blkcipher.c 	walk->cipher_blocksize = crypto_blkcipher_blocksize(desc->tfm);
walk              348 crypto/blkcipher.c 	walk->ivsize = crypto_blkcipher_ivsize(desc->tfm);
walk              349 crypto/blkcipher.c 	walk->alignmask = crypto_blkcipher_alignmask(desc->tfm);
walk              350 crypto/blkcipher.c 	return blkcipher_walk_first(desc, walk);
walk              355 crypto/blkcipher.c 				   struct blkcipher_walk *walk,
walk              359 crypto/blkcipher.c 	walk->flags &= ~BLKCIPHER_WALK_PHYS;
walk              360 crypto/blkcipher.c 	walk->walk_blocksize = blocksize;
walk              361 crypto/blkcipher.c 	walk->cipher_blocksize = crypto_aead_blocksize(tfm);
walk              362 crypto/blkcipher.c 	walk->ivsize = crypto_aead_ivsize(tfm);
walk              363 crypto/blkcipher.c 	walk->alignmask = crypto_aead_alignmask(tfm);
walk              364 crypto/blkcipher.c 	return blkcipher_walk_first(desc, walk);
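
A minimal sketch of how the legacy blkcipher_walk above is driven by its users (compare the drivers/crypto/padlock-aes.c entries further down): initialise the walk over src/dst, process whole blocks from each chunk, and report the sub-block remainder to blkcipher_walk_done(). my_ecb_core() is a hypothetical bulk ECB primitive:

#include <crypto/algapi.h>
#include <crypto/aes.h>

/* hypothetical bulk primitive: encrypt 'blocks' AES blocks from src to dst */
static void my_ecb_core(const u8 *src, u8 *dst, unsigned int blocks);

static int my_blkcipher_ecb(struct blkcipher_desc *desc,
			    struct scatterlist *dst, struct scatterlist *src,
			    unsigned int nbytes)
{
	struct blkcipher_walk walk;
	int err;

	blkcipher_walk_init(&walk, dst, src, nbytes);
	err = blkcipher_walk_virt(desc, &walk);

	while ((nbytes = walk.nbytes)) {
		my_ecb_core(walk.src.virt.addr, walk.dst.virt.addr,
			    nbytes / AES_BLOCK_SIZE);
		nbytes &= AES_BLOCK_SIZE - 1;
		err = blkcipher_walk_done(desc, &walk, nbytes);
	}

	return err;
}
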
walk               37 crypto/cbc.c   	struct skcipher_walk walk;
walk               40 crypto/cbc.c   	err = skcipher_walk_virt(&walk, req, false);
walk               42 crypto/cbc.c   	while (walk.nbytes) {
walk               43 crypto/cbc.c   		err = crypto_cbc_decrypt_blocks(&walk, tfm,
walk               45 crypto/cbc.c   		err = skcipher_walk_done(&walk, err);
walk               42 crypto/cfb.c   static void crypto_cfb_final(struct skcipher_walk *walk,
walk               48 crypto/cfb.c   	u8 *src = walk->src.virt.addr;
walk               49 crypto/cfb.c   	u8 *dst = walk->dst.virt.addr;
walk               50 crypto/cfb.c   	u8 *iv = walk->iv;
walk               51 crypto/cfb.c   	unsigned int nbytes = walk->nbytes;
walk               57 crypto/cfb.c   static int crypto_cfb_encrypt_segment(struct skcipher_walk *walk,
walk               61 crypto/cfb.c   	unsigned int nbytes = walk->nbytes;
walk               62 crypto/cfb.c   	u8 *src = walk->src.virt.addr;
walk               63 crypto/cfb.c   	u8 *dst = walk->dst.virt.addr;
walk               64 crypto/cfb.c   	u8 *iv = walk->iv;
walk               75 crypto/cfb.c   	memcpy(walk->iv, iv, bsize);
walk               80 crypto/cfb.c   static int crypto_cfb_encrypt_inplace(struct skcipher_walk *walk,
walk               84 crypto/cfb.c   	unsigned int nbytes = walk->nbytes;
walk               85 crypto/cfb.c   	u8 *src = walk->src.virt.addr;
walk               86 crypto/cfb.c   	u8 *iv = walk->iv;
walk               97 crypto/cfb.c   	memcpy(walk->iv, iv, bsize);
walk              105 crypto/cfb.c   	struct skcipher_walk walk;
walk              109 crypto/cfb.c   	err = skcipher_walk_virt(&walk, req, false);
walk              111 crypto/cfb.c   	while (walk.nbytes >= bsize) {
walk              112 crypto/cfb.c   		if (walk.src.virt.addr == walk.dst.virt.addr)
walk              113 crypto/cfb.c   			err = crypto_cfb_encrypt_inplace(&walk, tfm);
walk              115 crypto/cfb.c   			err = crypto_cfb_encrypt_segment(&walk, tfm);
walk              116 crypto/cfb.c   		err = skcipher_walk_done(&walk, err);
walk              119 crypto/cfb.c   	if (walk.nbytes) {
walk              120 crypto/cfb.c   		crypto_cfb_final(&walk, tfm);
walk              121 crypto/cfb.c   		err = skcipher_walk_done(&walk, 0);
walk              127 crypto/cfb.c   static int crypto_cfb_decrypt_segment(struct skcipher_walk *walk,
walk              131 crypto/cfb.c   	unsigned int nbytes = walk->nbytes;
walk              132 crypto/cfb.c   	u8 *src = walk->src.virt.addr;
walk              133 crypto/cfb.c   	u8 *dst = walk->dst.virt.addr;
walk              134 crypto/cfb.c   	u8 *iv = walk->iv;
walk              145 crypto/cfb.c   	memcpy(walk->iv, iv, bsize);
walk              150 crypto/cfb.c   static int crypto_cfb_decrypt_inplace(struct skcipher_walk *walk,
walk              154 crypto/cfb.c   	unsigned int nbytes = walk->nbytes;
walk              155 crypto/cfb.c   	u8 *src = walk->src.virt.addr;
walk              156 crypto/cfb.c   	u8 * const iv = walk->iv;
walk              169 crypto/cfb.c   static int crypto_cfb_decrypt_blocks(struct skcipher_walk *walk,
walk              172 crypto/cfb.c   	if (walk->src.virt.addr == walk->dst.virt.addr)
walk              173 crypto/cfb.c   		return crypto_cfb_decrypt_inplace(walk, tfm);
walk              175 crypto/cfb.c   		return crypto_cfb_decrypt_segment(walk, tfm);
walk              181 crypto/cfb.c   	struct skcipher_walk walk;
walk              185 crypto/cfb.c   	err = skcipher_walk_virt(&walk, req, false);
walk              187 crypto/cfb.c   	while (walk.nbytes >= bsize) {
walk              188 crypto/cfb.c   		err = crypto_cfb_decrypt_blocks(&walk, tfm);
walk              189 crypto/cfb.c   		err = skcipher_walk_done(&walk, err);
walk              192 crypto/cfb.c   	if (walk.nbytes) {
walk              193 crypto/cfb.c   		crypto_cfb_final(&walk, tfm);
walk              194 crypto/cfb.c   		err = skcipher_walk_done(&walk, 0);
walk               37 crypto/chacha_generic.c 	struct skcipher_walk walk;
walk               41 crypto/chacha_generic.c 	err = skcipher_walk_virt(&walk, req, false);
walk               45 crypto/chacha_generic.c 	while (walk.nbytes > 0) {
walk               46 crypto/chacha_generic.c 		unsigned int nbytes = walk.nbytes;
walk               48 crypto/chacha_generic.c 		if (nbytes < walk.total)
walk               51 crypto/chacha_generic.c 		chacha_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
walk               53 crypto/chacha_generic.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
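
A minimal sketch of the walk.stride pattern used by the chacha glue above (and by crypto/salsa20_generic.c below): intermediate chunks are rounded down to the walk stride and the remainder is handed back to skcipher_walk_done(), while only the final chunk may be shorter than a stride. my_docrypt() is a hypothetical keystream core:

#include <linux/kernel.h>
#include <crypto/internal/skcipher.h>

/* hypothetical keystream core */
static void my_docrypt(u32 *state, u8 *dst, const u8 *src, unsigned int bytes);

static int my_stride_crypt(struct skcipher_request *req, u32 *state)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		/* keep full strides until the very last chunk */
		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		my_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
			   nbytes);
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}
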
walk               78 crypto/crypto_null.c 	struct skcipher_walk walk;
walk               81 crypto/crypto_null.c 	err = skcipher_walk_virt(&walk, req, false);
walk               83 crypto/crypto_null.c 	while (walk.nbytes) {
walk               84 crypto/crypto_null.c 		if (walk.src.virt.addr != walk.dst.virt.addr)
walk               85 crypto/crypto_null.c 			memcpy(walk.dst.virt.addr, walk.src.virt.addr,
walk               86 crypto/crypto_null.c 			       walk.nbytes);
walk               87 crypto/crypto_null.c 		err = skcipher_walk_done(&walk, 0);
walk               27 crypto/ctr.c   static void crypto_ctr_crypt_final(struct skcipher_walk *walk,
walk               32 crypto/ctr.c   	u8 *ctrblk = walk->iv;
walk               35 crypto/ctr.c   	u8 *src = walk->src.virt.addr;
walk               36 crypto/ctr.c   	u8 *dst = walk->dst.virt.addr;
walk               37 crypto/ctr.c   	unsigned int nbytes = walk->nbytes;
walk               45 crypto/ctr.c   static int crypto_ctr_crypt_segment(struct skcipher_walk *walk,
walk               51 crypto/ctr.c   	u8 *ctrblk = walk->iv;
walk               52 crypto/ctr.c   	u8 *src = walk->src.virt.addr;
walk               53 crypto/ctr.c   	u8 *dst = walk->dst.virt.addr;
walk               54 crypto/ctr.c   	unsigned int nbytes = walk->nbytes;
walk               71 crypto/ctr.c   static int crypto_ctr_crypt_inplace(struct skcipher_walk *walk,
walk               78 crypto/ctr.c   	unsigned int nbytes = walk->nbytes;
walk               79 crypto/ctr.c   	u8 *ctrblk = walk->iv;
walk               80 crypto/ctr.c   	u8 *src = walk->src.virt.addr;
walk              103 crypto/ctr.c   	struct skcipher_walk walk;
walk              107 crypto/ctr.c   	err = skcipher_walk_virt(&walk, req, false);
walk              109 crypto/ctr.c   	while (walk.nbytes >= bsize) {
walk              110 crypto/ctr.c   		if (walk.src.virt.addr == walk.dst.virt.addr)
walk              111 crypto/ctr.c   			nbytes = crypto_ctr_crypt_inplace(&walk, cipher);
walk              113 crypto/ctr.c   			nbytes = crypto_ctr_crypt_segment(&walk, cipher);
walk              115 crypto/ctr.c   		err = skcipher_walk_done(&walk, nbytes);
walk              118 crypto/ctr.c   	if (walk.nbytes) {
walk              119 crypto/ctr.c   		crypto_ctr_crypt_final(&walk, cipher);
walk              120 crypto/ctr.c   		err = skcipher_walk_done(&walk, 0);
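
A minimal sketch of the "final partial block" step that crypto_ctr_crypt_final() above performs: the counter block held in walk->iv is encrypted into a local keystream buffer and only walk->nbytes bytes are XORed out, so no padding is ever written. This is a simplified sketch (the keystream-buffer alignment handling of the real ctr.c is omitted):

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

static void my_ctr_final(struct skcipher_walk *walk, struct crypto_cipher *tfm)
{
	u8 keystream[MAX_CIPHER_BLOCKSIZE];
	const u8 *src = walk->src.virt.addr;
	u8 *dst = walk->dst.virt.addr;
	unsigned int nbytes = walk->nbytes;

	/* encrypt the counter block, then XOR only the trailing bytes */
	crypto_cipher_encrypt_one(tfm, keystream, walk->iv);
	crypto_xor_cpy(dst, src, keystream, nbytes);
}
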
walk               20 crypto/ecb.c   	struct skcipher_walk walk;
walk               24 crypto/ecb.c   	err = skcipher_walk_virt(&walk, req, false);
walk               26 crypto/ecb.c   	while ((nbytes = walk.nbytes) != 0) {
walk               27 crypto/ecb.c   		const u8 *src = walk.src.virt.addr;
walk               28 crypto/ecb.c   		u8 *dst = walk.dst.virt.addr;
walk               37 crypto/ecb.c   		err = skcipher_walk_done(&walk, nbytes);
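
A minimal sketch of the block-at-a-time loop used by the ecb.c template above: whole blocks are processed within each chunk and the sub-block remainder is returned to skcipher_walk_done(); fn stands for a single-block primitive such as crypto_cipher_encrypt_one():

#include <crypto/algapi.h>
#include <crypto/internal/skcipher.h>

static int my_ecb_crypt(struct skcipher_request *req,
			struct crypto_cipher *cipher,
			void (*fn)(struct crypto_tfm *, u8 *, const u8 *))
{
	const unsigned int bsize = crypto_cipher_blocksize(cipher);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);

	while ((nbytes = walk.nbytes) != 0) {
		const u8 *src = walk.src.virt.addr;
		u8 *dst = walk.dst.virt.addr;

		do {
			fn(crypto_cipher_tfm(cipher), dst, src);
			src += bsize;
			dst += bsize;
		} while ((nbytes -= bsize) >= bsize);

		/* nbytes is now the sub-block remainder of this chunk */
		err = skcipher_walk_done(&walk, nbytes);
	}

	return err;
}
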
walk              101 crypto/keywrap.c static void crypto_kw_scatterlist_ff(struct scatter_walk *walk,
walk              113 crypto/keywrap.c 			scatterwalk_start(walk, sg);
walk              114 crypto/keywrap.c 			scatterwalk_advance(walk, skip);
walk               22 crypto/ofb.c   	struct skcipher_walk walk;
walk               25 crypto/ofb.c   	err = skcipher_walk_virt(&walk, req, false);
walk               27 crypto/ofb.c   	while (walk.nbytes >= bsize) {
walk               28 crypto/ofb.c   		const u8 *src = walk.src.virt.addr;
walk               29 crypto/ofb.c   		u8 *dst = walk.dst.virt.addr;
walk               30 crypto/ofb.c   		u8 * const iv = walk.iv;
walk               31 crypto/ofb.c   		unsigned int nbytes = walk.nbytes;
walk               40 crypto/ofb.c   		err = skcipher_walk_done(&walk, nbytes);
walk               43 crypto/ofb.c   	if (walk.nbytes) {
walk               44 crypto/ofb.c   		crypto_cipher_encrypt_one(cipher, walk.iv, walk.iv);
walk               45 crypto/ofb.c   		crypto_xor_cpy(walk.dst.virt.addr, walk.src.virt.addr, walk.iv,
walk               46 crypto/ofb.c   			       walk.nbytes);
walk               47 crypto/ofb.c   		err = skcipher_walk_done(&walk, 0);
walk               20 crypto/pcbc.c  				       struct skcipher_walk *walk,
walk               24 crypto/pcbc.c  	unsigned int nbytes = walk->nbytes;
walk               25 crypto/pcbc.c  	u8 *src = walk->src.virt.addr;
walk               26 crypto/pcbc.c  	u8 *dst = walk->dst.virt.addr;
walk               27 crypto/pcbc.c  	u8 * const iv = walk->iv;
walk               42 crypto/pcbc.c  				       struct skcipher_walk *walk,
walk               46 crypto/pcbc.c  	unsigned int nbytes = walk->nbytes;
walk               47 crypto/pcbc.c  	u8 *src = walk->src.virt.addr;
walk               48 crypto/pcbc.c  	u8 * const iv = walk->iv;
walk               67 crypto/pcbc.c  	struct skcipher_walk walk;
walk               71 crypto/pcbc.c  	err = skcipher_walk_virt(&walk, req, false);
walk               73 crypto/pcbc.c  	while ((nbytes = walk.nbytes)) {
walk               74 crypto/pcbc.c  		if (walk.src.virt.addr == walk.dst.virt.addr)
walk               75 crypto/pcbc.c  			nbytes = crypto_pcbc_encrypt_inplace(req, &walk,
walk               78 crypto/pcbc.c  			nbytes = crypto_pcbc_encrypt_segment(req, &walk,
walk               80 crypto/pcbc.c  		err = skcipher_walk_done(&walk, nbytes);
walk               87 crypto/pcbc.c  				       struct skcipher_walk *walk,
walk               91 crypto/pcbc.c  	unsigned int nbytes = walk->nbytes;
walk               92 crypto/pcbc.c  	u8 *src = walk->src.virt.addr;
walk               93 crypto/pcbc.c  	u8 *dst = walk->dst.virt.addr;
walk               94 crypto/pcbc.c  	u8 * const iv = walk->iv;
walk              109 crypto/pcbc.c  				       struct skcipher_walk *walk,
walk              113 crypto/pcbc.c  	unsigned int nbytes = walk->nbytes;
walk              114 crypto/pcbc.c  	u8 *src = walk->src.virt.addr;
walk              115 crypto/pcbc.c  	u8 * const iv = walk->iv;
walk              134 crypto/pcbc.c  	struct skcipher_walk walk;
walk              138 crypto/pcbc.c  	err = skcipher_walk_virt(&walk, req, false);
walk              140 crypto/pcbc.c  	while ((nbytes = walk.nbytes)) {
walk              141 crypto/pcbc.c  		if (walk.src.virt.addr == walk.dst.virt.addr)
walk              142 crypto/pcbc.c  			nbytes = crypto_pcbc_decrypt_inplace(req, &walk,
walk              145 crypto/pcbc.c  			nbytes = crypto_pcbc_decrypt_segment(req, &walk,
walk              147 crypto/pcbc.c  		err = skcipher_walk_done(&walk, nbytes);
walk              157 crypto/salsa20_generic.c 	struct skcipher_walk walk;
walk              161 crypto/salsa20_generic.c 	err = skcipher_walk_virt(&walk, req, false);
walk              165 crypto/salsa20_generic.c 	while (walk.nbytes > 0) {
walk              166 crypto/salsa20_generic.c 		unsigned int nbytes = walk.nbytes;
walk              168 crypto/salsa20_generic.c 		if (nbytes < walk.total)
walk              169 crypto/salsa20_generic.c 			nbytes = round_down(nbytes, walk.stride);
walk              171 crypto/salsa20_generic.c 		salsa20_docrypt(state, walk.dst.virt.addr, walk.src.virt.addr,
walk              173 crypto/salsa20_generic.c 		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
walk               26 crypto/scatterwalk.c void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
walk               30 crypto/scatterwalk.c 		unsigned int len_this_page = scatterwalk_pagelen(walk);
walk               37 crypto/scatterwalk.c 			vaddr = scatterwalk_map(walk);
walk               42 crypto/scatterwalk.c 		scatterwalk_advance(walk, len_this_page);
walk               50 crypto/scatterwalk.c 		scatterwalk_pagedone(walk, out & 1, 1);
walk               58 crypto/scatterwalk.c 	struct scatter_walk walk;
walk               66 crypto/scatterwalk.c 	scatterwalk_start(&walk, sg);
walk               67 crypto/scatterwalk.c 	scatterwalk_copychunks(buf, &walk, nbytes, out);
walk               68 crypto/scatterwalk.c 	scatterwalk_done(&walk, out, 0);
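
A minimal sketch of the scatter_walk copy idiom shown above and reused by several drivers below (s5p-sss, stm32-cryp, nx): start the walk on the scatterlist, optionally skip leading bytes, copy nbytes to or from a linear buffer, then flush with scatterwalk_done(); out selects the copy direction:

#include <crypto/scatterwalk.h>

static void my_sg_copy(void *buf, struct scatterlist *sg,
		       unsigned int start, unsigned int nbytes, int out)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);	/* skip leading bytes */
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}
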
walk              242 crypto/shash.c 	struct crypto_hash_walk walk;
walk              245 crypto/shash.c 	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
walk              246 crypto/shash.c 	     nbytes = crypto_hash_walk_done(&walk, nbytes))
walk              247 crypto/shash.c 		nbytes = crypto_shash_update(desc, walk.data, nbytes);
walk              265 crypto/shash.c 	struct crypto_hash_walk walk;
walk              268 crypto/shash.c 	nbytes = crypto_hash_walk_first(req, &walk);
walk              273 crypto/shash.c 		nbytes = crypto_hash_walk_last(&walk) ?
walk              274 crypto/shash.c 			 crypto_shash_finup(desc, walk.data, nbytes,
walk              276 crypto/shash.c 			 crypto_shash_update(desc, walk.data, nbytes);
walk              277 crypto/shash.c 		nbytes = crypto_hash_walk_done(&walk, nbytes);
walk               42 crypto/skcipher.c static int skcipher_walk_next(struct skcipher_walk *walk);
walk               44 crypto/skcipher.c static inline void skcipher_unmap(struct scatter_walk *walk, void *vaddr)
walk               46 crypto/skcipher.c 	if (PageHighMem(scatterwalk_page(walk)))
walk               50 crypto/skcipher.c static inline void *skcipher_map(struct scatter_walk *walk)
walk               52 crypto/skcipher.c 	struct page *page = scatterwalk_page(walk);
walk               55 crypto/skcipher.c 	       offset_in_page(walk->offset);
walk               58 crypto/skcipher.c static inline void skcipher_map_src(struct skcipher_walk *walk)
walk               60 crypto/skcipher.c 	walk->src.virt.addr = skcipher_map(&walk->in);
walk               63 crypto/skcipher.c static inline void skcipher_map_dst(struct skcipher_walk *walk)
walk               65 crypto/skcipher.c 	walk->dst.virt.addr = skcipher_map(&walk->out);
walk               68 crypto/skcipher.c static inline void skcipher_unmap_src(struct skcipher_walk *walk)
walk               70 crypto/skcipher.c 	skcipher_unmap(&walk->in, walk->src.virt.addr);
walk               73 crypto/skcipher.c static inline void skcipher_unmap_dst(struct skcipher_walk *walk)
walk               75 crypto/skcipher.c 	skcipher_unmap(&walk->out, walk->dst.virt.addr);
walk               78 crypto/skcipher.c static inline gfp_t skcipher_walk_gfp(struct skcipher_walk *walk)
walk               80 crypto/skcipher.c 	return walk->flags & SKCIPHER_WALK_SLEEP ? GFP_KERNEL : GFP_ATOMIC;
walk               93 crypto/skcipher.c static int skcipher_done_slow(struct skcipher_walk *walk, unsigned int bsize)
walk               97 crypto/skcipher.c 	addr = (u8 *)ALIGN((unsigned long)walk->buffer, walk->alignmask + 1);
walk               99 crypto/skcipher.c 	scatterwalk_copychunks(addr, &walk->out, bsize,
walk              100 crypto/skcipher.c 			       (walk->flags & SKCIPHER_WALK_PHYS) ? 2 : 1);
walk              104 crypto/skcipher.c int skcipher_walk_done(struct skcipher_walk *walk, int err)
walk              106 crypto/skcipher.c 	unsigned int n = walk->nbytes;
walk              114 crypto/skcipher.c 		nbytes = walk->total - n;
walk              117 crypto/skcipher.c 	if (likely(!(walk->flags & (SKCIPHER_WALK_PHYS |
walk              122 crypto/skcipher.c 		skcipher_unmap_src(walk);
walk              123 crypto/skcipher.c 	} else if (walk->flags & SKCIPHER_WALK_DIFF) {
walk              124 crypto/skcipher.c 		skcipher_unmap_dst(walk);
walk              126 crypto/skcipher.c 	} else if (walk->flags & SKCIPHER_WALK_COPY) {
walk              127 crypto/skcipher.c 		skcipher_map_dst(walk);
walk              128 crypto/skcipher.c 		memcpy(walk->dst.virt.addr, walk->page, n);
walk              129 crypto/skcipher.c 		skcipher_unmap_dst(walk);
walk              130 crypto/skcipher.c 	} else if (unlikely(walk->flags & SKCIPHER_WALK_SLOW)) {
walk              141 crypto/skcipher.c 			n = skcipher_done_slow(walk, n);
walk              147 crypto/skcipher.c 	walk->total = nbytes;
walk              148 crypto/skcipher.c 	walk->nbytes = 0;
walk              150 crypto/skcipher.c 	scatterwalk_advance(&walk->in, n);
walk              151 crypto/skcipher.c 	scatterwalk_advance(&walk->out, n);
walk              152 crypto/skcipher.c 	scatterwalk_done(&walk->in, 0, nbytes);
walk              153 crypto/skcipher.c 	scatterwalk_done(&walk->out, 1, nbytes);
walk              156 crypto/skcipher.c 		crypto_yield(walk->flags & SKCIPHER_WALK_SLEEP ?
walk              158 crypto/skcipher.c 		return skcipher_walk_next(walk);
walk              163 crypto/skcipher.c 	if (!((unsigned long)walk->buffer | (unsigned long)walk->page))
walk              166 crypto/skcipher.c 	if (walk->flags & SKCIPHER_WALK_PHYS)
walk              169 crypto/skcipher.c 	if (walk->iv != walk->oiv)
walk              170 crypto/skcipher.c 		memcpy(walk->oiv, walk->iv, walk->ivsize);
walk              171 crypto/skcipher.c 	if (walk->buffer != walk->page)
walk              172 crypto/skcipher.c 		kfree(walk->buffer);
walk              173 crypto/skcipher.c 	if (walk->page)
walk              174 crypto/skcipher.c 		free_page((unsigned long)walk->page);
walk              181 crypto/skcipher.c void skcipher_walk_complete(struct skcipher_walk *walk, int err)
walk              185 crypto/skcipher.c 	list_for_each_entry_safe(p, tmp, &walk->buffers, entry) {
walk              193 crypto/skcipher.c 			data = PTR_ALIGN(&p->buffer[0], walk->alignmask + 1);
walk              194 crypto/skcipher.c 			data = skcipher_get_spot(data, walk->stride);
walk              199 crypto/skcipher.c 		if (offset_in_page(p->data) + p->len + walk->stride >
walk              208 crypto/skcipher.c 	if (!err && walk->iv != walk->oiv)
walk              209 crypto/skcipher.c 		memcpy(walk->oiv, walk->iv, walk->ivsize);
walk              210 crypto/skcipher.c 	if (walk->buffer != walk->page)
walk              211 crypto/skcipher.c 		kfree(walk->buffer);
walk              212 crypto/skcipher.c 	if (walk->page)
walk              213 crypto/skcipher.c 		free_page((unsigned long)walk->page);
walk              217 crypto/skcipher.c static void skcipher_queue_write(struct skcipher_walk *walk,
walk              220 crypto/skcipher.c 	p->dst = walk->out;
walk              221 crypto/skcipher.c 	list_add_tail(&p->entry, &walk->buffers);
walk              224 crypto/skcipher.c static int skcipher_next_slow(struct skcipher_walk *walk, unsigned int bsize)
walk              226 crypto/skcipher.c 	bool phys = walk->flags & SKCIPHER_WALK_PHYS;
walk              227 crypto/skcipher.c 	unsigned alignmask = walk->alignmask;
walk              235 crypto/skcipher.c 		if (!walk->buffer)
walk              236 crypto/skcipher.c 			walk->buffer = walk->page;
walk              237 crypto/skcipher.c 		buffer = walk->buffer;
walk              258 crypto/skcipher.c 	v = kzalloc(n, skcipher_walk_gfp(walk));
walk              260 crypto/skcipher.c 		return skcipher_walk_done(walk, -ENOMEM);
walk              265 crypto/skcipher.c 		skcipher_queue_write(walk, p);
walk              268 crypto/skcipher.c 		walk->buffer = v;
walk              273 crypto/skcipher.c 	walk->dst.virt.addr = PTR_ALIGN(buffer, alignmask + 1);
walk              274 crypto/skcipher.c 	walk->dst.virt.addr = skcipher_get_spot(walk->dst.virt.addr, bsize);
walk              275 crypto/skcipher.c 	walk->src.virt.addr = walk->dst.virt.addr;
walk              277 crypto/skcipher.c 	scatterwalk_copychunks(walk->src.virt.addr, &walk->in, bsize, 0);
walk              279 crypto/skcipher.c 	walk->nbytes = bsize;
walk              280 crypto/skcipher.c 	walk->flags |= SKCIPHER_WALK_SLOW;
walk              285 crypto/skcipher.c static int skcipher_next_copy(struct skcipher_walk *walk)
walk              288 crypto/skcipher.c 	u8 *tmp = walk->page;
walk              290 crypto/skcipher.c 	skcipher_map_src(walk);
walk              291 crypto/skcipher.c 	memcpy(tmp, walk->src.virt.addr, walk->nbytes);
walk              292 crypto/skcipher.c 	skcipher_unmap_src(walk);
walk              294 crypto/skcipher.c 	walk->src.virt.addr = tmp;
walk              295 crypto/skcipher.c 	walk->dst.virt.addr = tmp;
walk              297 crypto/skcipher.c 	if (!(walk->flags & SKCIPHER_WALK_PHYS))
walk              300 crypto/skcipher.c 	p = kmalloc(sizeof(*p), skcipher_walk_gfp(walk));
walk              304 crypto/skcipher.c 	p->data = walk->page;
walk              305 crypto/skcipher.c 	p->len = walk->nbytes;
walk              306 crypto/skcipher.c 	skcipher_queue_write(walk, p);
walk              308 crypto/skcipher.c 	if (offset_in_page(walk->page) + walk->nbytes + walk->stride >
walk              310 crypto/skcipher.c 		walk->page = NULL;
walk              312 crypto/skcipher.c 		walk->page += walk->nbytes;
walk              317 crypto/skcipher.c static int skcipher_next_fast(struct skcipher_walk *walk)
walk              321 crypto/skcipher.c 	walk->src.phys.page = scatterwalk_page(&walk->in);
walk              322 crypto/skcipher.c 	walk->src.phys.offset = offset_in_page(walk->in.offset);
walk              323 crypto/skcipher.c 	walk->dst.phys.page = scatterwalk_page(&walk->out);
walk              324 crypto/skcipher.c 	walk->dst.phys.offset = offset_in_page(walk->out.offset);
walk              326 crypto/skcipher.c 	if (walk->flags & SKCIPHER_WALK_PHYS)
walk              329 crypto/skcipher.c 	diff = walk->src.phys.offset - walk->dst.phys.offset;
walk              330 crypto/skcipher.c 	diff |= walk->src.virt.page - walk->dst.virt.page;
walk              332 crypto/skcipher.c 	skcipher_map_src(walk);
walk              333 crypto/skcipher.c 	walk->dst.virt.addr = walk->src.virt.addr;
walk              336 crypto/skcipher.c 		walk->flags |= SKCIPHER_WALK_DIFF;
walk              337 crypto/skcipher.c 		skcipher_map_dst(walk);
walk              343 crypto/skcipher.c static int skcipher_walk_next(struct skcipher_walk *walk)
walk              349 crypto/skcipher.c 	walk->flags &= ~(SKCIPHER_WALK_SLOW | SKCIPHER_WALK_COPY |
walk              352 crypto/skcipher.c 	n = walk->total;
walk              353 crypto/skcipher.c 	bsize = min(walk->stride, max(n, walk->blocksize));
walk              354 crypto/skcipher.c 	n = scatterwalk_clamp(&walk->in, n);
walk              355 crypto/skcipher.c 	n = scatterwalk_clamp(&walk->out, n);
walk              358 crypto/skcipher.c 		if (unlikely(walk->total < walk->blocksize))
walk              359 crypto/skcipher.c 			return skcipher_walk_done(walk, -EINVAL);
walk              362 crypto/skcipher.c 		err = skcipher_next_slow(walk, bsize);
walk              366 crypto/skcipher.c 	if (unlikely((walk->in.offset | walk->out.offset) & walk->alignmask)) {
walk              367 crypto/skcipher.c 		if (!walk->page) {
walk              368 crypto/skcipher.c 			gfp_t gfp = skcipher_walk_gfp(walk);
walk              370 crypto/skcipher.c 			walk->page = (void *)__get_free_page(gfp);
walk              371 crypto/skcipher.c 			if (!walk->page)
walk              375 crypto/skcipher.c 		walk->nbytes = min_t(unsigned, n,
walk              376 crypto/skcipher.c 				     PAGE_SIZE - offset_in_page(walk->page));
walk              377 crypto/skcipher.c 		walk->flags |= SKCIPHER_WALK_COPY;
walk              378 crypto/skcipher.c 		err = skcipher_next_copy(walk);
walk              382 crypto/skcipher.c 	walk->nbytes = n;
walk              384 crypto/skcipher.c 	return skcipher_next_fast(walk);
walk              387 crypto/skcipher.c 	if (!err && (walk->flags & SKCIPHER_WALK_PHYS)) {
walk              388 crypto/skcipher.c 		walk->src.phys.page = virt_to_page(walk->src.virt.addr);
walk              389 crypto/skcipher.c 		walk->dst.phys.page = virt_to_page(walk->dst.virt.addr);
walk              390 crypto/skcipher.c 		walk->src.phys.offset &= PAGE_SIZE - 1;
walk              391 crypto/skcipher.c 		walk->dst.phys.offset &= PAGE_SIZE - 1;
walk              396 crypto/skcipher.c static int skcipher_copy_iv(struct skcipher_walk *walk)
walk              399 crypto/skcipher.c 	unsigned alignmask = walk->alignmask;
walk              400 crypto/skcipher.c 	unsigned ivsize = walk->ivsize;
walk              401 crypto/skcipher.c 	unsigned bs = walk->stride;
walk              411 crypto/skcipher.c 	if (walk->flags & SKCIPHER_WALK_PHYS)
walk              420 crypto/skcipher.c 	walk->buffer = kmalloc(size, skcipher_walk_gfp(walk));
walk              421 crypto/skcipher.c 	if (!walk->buffer)
walk              424 crypto/skcipher.c 	iv = PTR_ALIGN(walk->buffer, alignmask + 1);
walk              427 crypto/skcipher.c 	walk->iv = memcpy(iv, walk->iv, walk->ivsize);
walk              431 crypto/skcipher.c static int skcipher_walk_first(struct skcipher_walk *walk)
walk              436 crypto/skcipher.c 	walk->buffer = NULL;
walk              437 crypto/skcipher.c 	if (unlikely(((unsigned long)walk->iv & walk->alignmask))) {
walk              438 crypto/skcipher.c 		int err = skcipher_copy_iv(walk);
walk              443 crypto/skcipher.c 	walk->page = NULL;
walk              445 crypto/skcipher.c 	return skcipher_walk_next(walk);
walk              448 crypto/skcipher.c static int skcipher_walk_skcipher(struct skcipher_walk *walk,
walk              453 crypto/skcipher.c 	walk->total = req->cryptlen;
walk              454 crypto/skcipher.c 	walk->nbytes = 0;
walk              455 crypto/skcipher.c 	walk->iv = req->iv;
walk              456 crypto/skcipher.c 	walk->oiv = req->iv;
walk              458 crypto/skcipher.c 	if (unlikely(!walk->total))
walk              461 crypto/skcipher.c 	scatterwalk_start(&walk->in, req->src);
walk              462 crypto/skcipher.c 	scatterwalk_start(&walk->out, req->dst);
walk              464 crypto/skcipher.c 	walk->flags &= ~SKCIPHER_WALK_SLEEP;
walk              465 crypto/skcipher.c 	walk->flags |= req->base.flags & CRYPTO_TFM_REQ_MAY_SLEEP ?
walk              468 crypto/skcipher.c 	walk->blocksize = crypto_skcipher_blocksize(tfm);
walk              469 crypto/skcipher.c 	walk->stride = crypto_skcipher_walksize(tfm);
walk              470 crypto/skcipher.c 	walk->ivsize = crypto_skcipher_ivsize(tfm);
walk              471 crypto/skcipher.c 	walk->alignmask = crypto_skcipher_alignmask(tfm);
walk              473 crypto/skcipher.c 	return skcipher_walk_first(walk);
walk              476 crypto/skcipher.c int skcipher_walk_virt(struct skcipher_walk *walk,
walk              483 crypto/skcipher.c 	walk->flags &= ~SKCIPHER_WALK_PHYS;
walk              485 crypto/skcipher.c 	err = skcipher_walk_skcipher(walk, req);
walk              487 crypto/skcipher.c 	walk->flags &= atomic ? ~SKCIPHER_WALK_SLEEP : ~0;
walk              493 crypto/skcipher.c void skcipher_walk_atomise(struct skcipher_walk *walk)
walk              495 crypto/skcipher.c 	walk->flags &= ~SKCIPHER_WALK_SLEEP;
walk              499 crypto/skcipher.c int skcipher_walk_async(struct skcipher_walk *walk,
walk              502 crypto/skcipher.c 	walk->flags |= SKCIPHER_WALK_PHYS;
walk              504 crypto/skcipher.c 	INIT_LIST_HEAD(&walk->buffers);
walk              506 crypto/skcipher.c 	return skcipher_walk_skcipher(walk, req);
walk              510 crypto/skcipher.c static int skcipher_walk_aead_common(struct skcipher_walk *walk,
walk              516 crypto/skcipher.c 	walk->nbytes = 0;
walk              517 crypto/skcipher.c 	walk->iv = req->iv;
walk              518 crypto/skcipher.c 	walk->oiv = req->iv;
walk              520 crypto/skcipher.c 	if (unlikely(!walk->total))
walk              523 crypto/skcipher.c 	walk->flags &= ~SKCIPHER_WALK_PHYS;
walk              525 crypto/skcipher.c 	scatterwalk_start(&walk->in, req->src);
walk              526 crypto/skcipher.c 	scatterwalk_start(&walk->out, req->dst);
walk              528 crypto/skcipher.c 	scatterwalk_copychunks(NULL, &walk->in, req->assoclen, 2);
walk              529 crypto/skcipher.c 	scatterwalk_copychunks(NULL, &walk->out, req->assoclen, 2);
walk              531 crypto/skcipher.c 	scatterwalk_done(&walk->in, 0, walk->total);
walk              532 crypto/skcipher.c 	scatterwalk_done(&walk->out, 0, walk->total);
walk              535 crypto/skcipher.c 		walk->flags |= SKCIPHER_WALK_SLEEP;
walk              537 crypto/skcipher.c 		walk->flags &= ~SKCIPHER_WALK_SLEEP;
walk              539 crypto/skcipher.c 	walk->blocksize = crypto_aead_blocksize(tfm);
walk              540 crypto/skcipher.c 	walk->stride = crypto_aead_chunksize(tfm);
walk              541 crypto/skcipher.c 	walk->ivsize = crypto_aead_ivsize(tfm);
walk              542 crypto/skcipher.c 	walk->alignmask = crypto_aead_alignmask(tfm);
walk              544 crypto/skcipher.c 	err = skcipher_walk_first(walk);
walk              547 crypto/skcipher.c 		walk->flags &= ~SKCIPHER_WALK_SLEEP;
walk              552 crypto/skcipher.c int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
walk              555 crypto/skcipher.c 	walk->total = req->cryptlen;
walk              557 crypto/skcipher.c 	return skcipher_walk_aead_common(walk, req, atomic);
walk              561 crypto/skcipher.c int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
walk              564 crypto/skcipher.c 	walk->total = req->cryptlen;
walk              566 crypto/skcipher.c 	return skcipher_walk_aead_common(walk, req, atomic);
walk              570 crypto/skcipher.c int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
walk              575 crypto/skcipher.c 	walk->total = req->cryptlen - crypto_aead_authsize(tfm);
walk              577 crypto/skcipher.c 	return skcipher_walk_aead_common(walk, req, atomic);
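
A minimal sketch of a consumer of the AEAD walk entry points listed above: skcipher_walk_aead_encrypt() positions the walk past the associated data, after which the loop looks like any other skcipher walk. my_aead_core() is a hypothetical per-chunk handler:

#include <linux/kernel.h>
#include <crypto/internal/aead.h>
#include <crypto/internal/skcipher.h>

/* hypothetical per-chunk AEAD core */
static void my_aead_core(u8 *dst, const u8 *src, unsigned int nbytes, u8 *iv);

static int my_aead_encrypt_walk(struct aead_request *req)
{
	struct skcipher_walk walk;
	int err;

	err = skcipher_walk_aead_encrypt(&walk, req, false);

	while (walk.nbytes > 0) {
		unsigned int nbytes = walk.nbytes;

		if (nbytes < walk.total)
			nbytes = round_down(nbytes, walk.stride);

		my_aead_core(walk.dst.virt.addr, walk.src.virt.addr, nbytes,
			     walk.iv);
		err = skcipher_walk_done(&walk, walk.nbytes - nbytes);
	}

	return err;
}
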
walk               86 drivers/atm/idt77105.c 	struct idt77105_priv *walk;
walk               91 drivers/atm/idt77105.c 	for (walk = idt77105_all; walk; walk = walk->next) {
walk               92 drivers/atm/idt77105.c 		dev = walk->dev;
walk               94 drivers/atm/idt77105.c 		stats = &walk->stats;
walk              115 drivers/atm/idt77105.c 	struct idt77105_priv *walk;
walk              120 drivers/atm/idt77105.c 	for (walk = idt77105_all; walk; walk = walk->next) {
walk              121 drivers/atm/idt77105.c 		dev = walk->dev;
walk              135 drivers/atm/idt77105.c 	            PUT( walk->old_mcr ,MCR);
walk              323 drivers/atm/idt77105.c 	struct idt77105_priv *walk, *prev;
walk              331 drivers/atm/idt77105.c 	for (prev = NULL, walk = idt77105_all ;
walk              332 drivers/atm/idt77105.c              walk != NULL;
walk              333 drivers/atm/idt77105.c              prev = walk, walk = walk->next) {
walk              334 drivers/atm/idt77105.c             if (walk->dev == dev) {
walk              336 drivers/atm/idt77105.c                     prev->next = walk->next;
walk              338 drivers/atm/idt77105.c                     idt77105_all = walk->next;
walk              341 drivers/atm/idt77105.c                 kfree(walk);
walk               59 drivers/atm/suni.c 	struct suni_priv *walk;
walk               63 drivers/atm/suni.c 	for (walk = sunis; walk; walk = walk->next) {
walk               64 drivers/atm/suni.c 		dev = walk->dev;
walk               65 drivers/atm/suni.c 		stats = &walk->sonet_stats;
walk              343 drivers/atm/suni.c 	struct suni_priv **walk;
walk              348 drivers/atm/suni.c 	for (walk = &sunis; *walk != PRIV(dev);
walk              349 drivers/atm/suni.c 	    walk = &PRIV((*walk)->dev)->next);
walk              350 drivers/atm/suni.c 	*walk = PRIV((*walk)->dev)->next;
walk              797 drivers/crypto/axis/artpec6_crypto.c 				  struct artpec6_crypto_walk *walk, size_t size)
walk              807 drivers/crypto/axis/artpec6_crypto.c 	bbuf->sg = walk->sg;
walk              808 drivers/crypto/axis/artpec6_crypto.c 	bbuf->offset = walk->offset;
walk              816 drivers/crypto/axis/artpec6_crypto.c 	pr_debug("BOUNCE %zu offset %zu\n", size, walk->offset);
walk              823 drivers/crypto/axis/artpec6_crypto.c 				  struct artpec6_crypto_walk *walk,
walk              830 drivers/crypto/axis/artpec6_crypto.c 	while (walk->sg && count) {
walk              831 drivers/crypto/axis/artpec6_crypto.c 		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
walk              832 drivers/crypto/axis/artpec6_crypto.c 		addr = artpec6_crypto_walk_chunk_phys(walk);
walk              845 drivers/crypto/axis/artpec6_crypto.c 			ret = setup_bounce_buffer_in(common, walk, chunk);
walk              848 drivers/crypto/axis/artpec6_crypto.c 			ret = setup_bounce_buffer_in(common, walk, chunk);
walk              857 drivers/crypto/axis/artpec6_crypto.c 							 sg_page(walk->sg),
walk              858 drivers/crypto/axis/artpec6_crypto.c 							 walk->sg->offset +
walk              859 drivers/crypto/axis/artpec6_crypto.c 							 walk->offset,
walk              875 drivers/crypto/axis/artpec6_crypto.c 		artpec6_crypto_walk_advance(walk, chunk);
walk              886 drivers/crypto/axis/artpec6_crypto.c 				   struct artpec6_crypto_walk *walk,
walk              893 drivers/crypto/axis/artpec6_crypto.c 	while (walk->sg && count) {
walk              894 drivers/crypto/axis/artpec6_crypto.c 		chunk = min(count, artpec6_crypto_walk_chunklen(walk));
walk              895 drivers/crypto/axis/artpec6_crypto.c 		addr = artpec6_crypto_walk_chunk_phys(walk);
walk              904 drivers/crypto/axis/artpec6_crypto.c 			sg_pcopy_to_buffer(walk->sg, 1, buf, chunk,
walk              905 drivers/crypto/axis/artpec6_crypto.c 					   walk->offset);
walk              914 drivers/crypto/axis/artpec6_crypto.c 							 sg_page(walk->sg),
walk              915 drivers/crypto/axis/artpec6_crypto.c 							 walk->sg->offset +
walk              916 drivers/crypto/axis/artpec6_crypto.c 							 walk->offset,
walk              932 drivers/crypto/axis/artpec6_crypto.c 		artpec6_crypto_walk_advance(walk, chunk);
walk             1404 drivers/crypto/axis/artpec6_crypto.c 		struct artpec6_crypto_walk walk;
walk             1428 drivers/crypto/axis/artpec6_crypto.c 		artpec6_crypto_walk_init(&walk, areq->src);
walk             1430 drivers/crypto/axis/artpec6_crypto.c 		error = artpec6_crypto_setup_sg_descrs_out(common, &walk,
walk             1436 drivers/crypto/axis/artpec6_crypto.c 		if (walk.sg) {
walk             1668 drivers/crypto/axis/artpec6_crypto.c 	struct artpec6_crypto_walk walk;
walk             1789 drivers/crypto/axis/artpec6_crypto.c 	artpec6_crypto_walk_init(&walk, areq->src);
walk             1790 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, areq->cryptlen);
walk             1795 drivers/crypto/axis/artpec6_crypto.c 	artpec6_crypto_walk_init(&walk, areq->dst);
walk             1796 drivers/crypto/axis/artpec6_crypto.c 	ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, areq->cryptlen);
walk             1930 drivers/crypto/axis/artpec6_crypto.c 		struct artpec6_crypto_walk walk;
walk             1932 drivers/crypto/axis/artpec6_crypto.c 		artpec6_crypto_walk_init(&walk, areq->src);
walk             1936 drivers/crypto/axis/artpec6_crypto.c 		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
walk             1953 drivers/crypto/axis/artpec6_crypto.c 		ret = artpec6_crypto_setup_sg_descrs_out(common, &walk, count);
walk             1972 drivers/crypto/axis/artpec6_crypto.c 		struct artpec6_crypto_walk walk;
walk             1978 drivers/crypto/axis/artpec6_crypto.c 		artpec6_crypto_walk_init(&walk, areq->dst);
walk             1981 drivers/crypto/axis/artpec6_crypto.c 		count = artpec6_crypto_walk_advance(&walk, areq->assoclen);
walk             1986 drivers/crypto/axis/artpec6_crypto.c 		ret = artpec6_crypto_setup_sg_descrs_in(common, &walk, count);
walk             2018 drivers/crypto/axis/artpec6_crypto.c 			ret = artpec6_crypto_setup_sg_descrs_in(common, &walk,
walk              380 drivers/crypto/chelsio/chcr_algo.c static inline void dsgl_walk_init(struct dsgl_walk *walk,
walk              383 drivers/crypto/chelsio/chcr_algo.c 	walk->dsgl = dsgl;
walk              384 drivers/crypto/chelsio/chcr_algo.c 	walk->nents = 0;
walk              385 drivers/crypto/chelsio/chcr_algo.c 	walk->to = (struct phys_sge_pairs *)(dsgl + 1);
walk              388 drivers/crypto/chelsio/chcr_algo.c static inline void dsgl_walk_end(struct dsgl_walk *walk, unsigned short qid,
walk              393 drivers/crypto/chelsio/chcr_algo.c 	phys_cpl = walk->dsgl;
walk              403 drivers/crypto/chelsio/chcr_algo.c 		      CPL_RX_PHYS_DSGL_NOOFSGENTR_V(walk->nents));
walk              410 drivers/crypto/chelsio/chcr_algo.c static inline void dsgl_walk_add_page(struct dsgl_walk *walk,
walk              418 drivers/crypto/chelsio/chcr_algo.c 	j = walk->nents;
walk              419 drivers/crypto/chelsio/chcr_algo.c 	walk->to->len[j % 8] = htons(size);
walk              420 drivers/crypto/chelsio/chcr_algo.c 	walk->to->addr[j % 8] = cpu_to_be64(addr);
walk              423 drivers/crypto/chelsio/chcr_algo.c 		walk->to++;
walk              424 drivers/crypto/chelsio/chcr_algo.c 	walk->nents = j;
walk              427 drivers/crypto/chelsio/chcr_algo.c static void  dsgl_walk_add_sg(struct dsgl_walk *walk,
walk              434 drivers/crypto/chelsio/chcr_algo.c 	unsigned int j = walk->nents;
walk              455 drivers/crypto/chelsio/chcr_algo.c 			walk->to->len[j % 8] = htons(ent_len);
walk              456 drivers/crypto/chelsio/chcr_algo.c 			walk->to->addr[j % 8] = cpu_to_be64(sg_dma_address(sg) +
walk              462 drivers/crypto/chelsio/chcr_algo.c 				walk->to++;
walk              464 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
walk              465 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg_len = min_t(u32, left_size, sg_dma_len(sg) -
walk              471 drivers/crypto/chelsio/chcr_algo.c 	walk->nents = j;
walk              474 drivers/crypto/chelsio/chcr_algo.c static inline void ulptx_walk_init(struct ulptx_walk *walk,
walk              477 drivers/crypto/chelsio/chcr_algo.c 	walk->sgl = ulp;
walk              478 drivers/crypto/chelsio/chcr_algo.c 	walk->nents = 0;
walk              479 drivers/crypto/chelsio/chcr_algo.c 	walk->pair_idx = 0;
walk              480 drivers/crypto/chelsio/chcr_algo.c 	walk->pair = ulp->sge;
walk              481 drivers/crypto/chelsio/chcr_algo.c 	walk->last_sg = NULL;
walk              482 drivers/crypto/chelsio/chcr_algo.c 	walk->last_sg_len = 0;
walk              485 drivers/crypto/chelsio/chcr_algo.c static inline void ulptx_walk_end(struct ulptx_walk *walk)
walk              487 drivers/crypto/chelsio/chcr_algo.c 	walk->sgl->cmd_nsge = htonl(ULPTX_CMD_V(ULP_TX_SC_DSGL) |
walk              488 drivers/crypto/chelsio/chcr_algo.c 			      ULPTX_NSGE_V(walk->nents));
walk              492 drivers/crypto/chelsio/chcr_algo.c static inline void ulptx_walk_add_page(struct ulptx_walk *walk,
walk              499 drivers/crypto/chelsio/chcr_algo.c 	if (walk->nents == 0) {
walk              500 drivers/crypto/chelsio/chcr_algo.c 		walk->sgl->len0 = cpu_to_be32(size);
walk              501 drivers/crypto/chelsio/chcr_algo.c 		walk->sgl->addr0 = cpu_to_be64(addr);
walk              503 drivers/crypto/chelsio/chcr_algo.c 		walk->pair->addr[walk->pair_idx] = cpu_to_be64(addr);
walk              504 drivers/crypto/chelsio/chcr_algo.c 		walk->pair->len[walk->pair_idx] = cpu_to_be32(size);
walk              505 drivers/crypto/chelsio/chcr_algo.c 		walk->pair_idx = !walk->pair_idx;
walk              506 drivers/crypto/chelsio/chcr_algo.c 		if (!walk->pair_idx)
walk              507 drivers/crypto/chelsio/chcr_algo.c 			walk->pair++;
walk              509 drivers/crypto/chelsio/chcr_algo.c 	walk->nents++;
walk              512 drivers/crypto/chelsio/chcr_algo.c static void  ulptx_walk_add_sg(struct ulptx_walk *walk,
walk              534 drivers/crypto/chelsio/chcr_algo.c 	if (sg && (walk->nents == 0)) {
walk              537 drivers/crypto/chelsio/chcr_algo.c 		walk->sgl->len0 = cpu_to_be32(sgmin);
walk              538 drivers/crypto/chelsio/chcr_algo.c 		walk->sgl->addr0 = cpu_to_be64(sg_dma_address(sg) + skip_len);
walk              539 drivers/crypto/chelsio/chcr_algo.c 		walk->nents++;
walk              541 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
walk              542 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg_len = sgmin + skip_len;
walk              553 drivers/crypto/chelsio/chcr_algo.c 		walk->pair->len[walk->pair_idx] = cpu_to_be32(sgmin);
walk              554 drivers/crypto/chelsio/chcr_algo.c 		walk->pair->addr[walk->pair_idx] =
walk              556 drivers/crypto/chelsio/chcr_algo.c 		walk->pair_idx = !walk->pair_idx;
walk              557 drivers/crypto/chelsio/chcr_algo.c 		walk->nents++;
walk              558 drivers/crypto/chelsio/chcr_algo.c 		if (!walk->pair_idx)
walk              559 drivers/crypto/chelsio/chcr_algo.c 			walk->pair++;
walk              562 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg = sg;
walk              563 drivers/crypto/chelsio/chcr_algo.c 		walk->last_sg_len = skip_len;
walk              282 drivers/crypto/geode-aes.c 	struct skcipher_walk walk;
walk              297 drivers/crypto/geode-aes.c 	err = skcipher_walk_virt(&walk, req, false);
walk              299 drivers/crypto/geode-aes.c 	while ((nbytes = walk.nbytes) != 0) {
walk              300 drivers/crypto/geode-aes.c 		geode_aes_crypt(tctx, walk.src.virt.addr, walk.dst.virt.addr,
walk              302 drivers/crypto/geode-aes.c 				walk.iv, mode, dir);
walk              303 drivers/crypto/geode-aes.c 		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
walk              623 drivers/crypto/hifn_795x.c 	struct hifn_cipher_walk	walk;
walk             1339 drivers/crypto/hifn_795x.c 	t = &rctx->walk.cache[0];
walk             1342 drivers/crypto/hifn_795x.c 		if (t->length && rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
walk             1534 drivers/crypto/hifn_795x.c 	rctx->walk.flags = 0;
walk             1542 drivers/crypto/hifn_795x.c 			rctx->walk.flags |= ASYNC_FLAGS_MISALIGNED;
walk             1548 drivers/crypto/hifn_795x.c 	if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
walk             1549 drivers/crypto/hifn_795x.c 		err = hifn_cipher_walk_init(&rctx->walk, idx, GFP_ATOMIC);
walk             1554 drivers/crypto/hifn_795x.c 	sg_num = hifn_cipher_walk(req, &rctx->walk);
walk             1667 drivers/crypto/hifn_795x.c 	if (rctx->walk.flags & ASYNC_FLAGS_MISALIGNED) {
walk             1674 drivers/crypto/hifn_795x.c 			t = &rctx->walk.cache[idx];
walk             1702 drivers/crypto/hifn_795x.c 		hifn_cipher_walk_exit(&rctx->walk);
walk              527 drivers/crypto/n2_core.c 	struct crypto_hash_walk walk;
walk              550 drivers/crypto/n2_core.c 	nbytes = crypto_hash_walk_first(req, &walk);
walk              569 drivers/crypto/n2_core.c 	ent->src_addr = __pa(walk.data);
walk              577 drivers/crypto/n2_core.c 	nbytes = crypto_hash_walk_done(&walk, 0);
walk              582 drivers/crypto/n2_core.c 		ent->src_addr = __pa(walk.data);
walk              590 drivers/crypto/n2_core.c 		nbytes = crypto_hash_walk_done(&walk, 0);
walk              686 drivers/crypto/n2_core.c 	struct ablkcipher_walk	walk;
walk              725 drivers/crypto/n2_core.c 	struct ablkcipher_walk	walk;
walk              883 drivers/crypto/n2_core.c 	struct ablkcipher_walk *walk = &rctx->walk;
walk              890 drivers/crypto/n2_core.c 	ablkcipher_walk_init(walk, req->dst, req->src, req->nbytes);
walk              891 drivers/crypto/n2_core.c 	err = ablkcipher_walk_phys(req, walk);
walk              908 drivers/crypto/n2_core.c 	while ((nbytes = walk->nbytes) != 0) {
walk              913 drivers/crypto/n2_core.c 		src_paddr = (page_to_phys(walk->src.page) +
walk              914 drivers/crypto/n2_core.c 			     walk->src.offset);
walk              915 drivers/crypto/n2_core.c 		dest_paddr = (page_to_phys(walk->dst.page) +
walk              916 drivers/crypto/n2_core.c 			      walk->dst.offset);
walk              918 drivers/crypto/n2_core.c 		this_len = cipher_descriptor_len(nbytes, walk->blocksize);
walk              949 drivers/crypto/n2_core.c 		err = ablkcipher_walk_done(req, walk, nbytes - this_len);
walk              967 drivers/crypto/n2_core.c 		memcpy(rctx->walk.iv, final_iv, rctx->walk.blocksize);
walk              969 drivers/crypto/n2_core.c 	ablkcipher_walk_complete(&rctx->walk);
walk             1053 drivers/crypto/n2_core.c 		iv_paddr = __pa(rctx->walk.iv);
walk             1060 drivers/crypto/n2_core.c 			iv_paddr = c->dest_final - rctx->walk.blocksize;
walk             1070 drivers/crypto/n2_core.c 				iv_paddr = __pa(rctx->walk.iv);
walk             1074 drivers/crypto/n2_core.c 					    rctx->walk.blocksize);
walk             1081 drivers/crypto/n2_core.c 				      rctx->walk.blocksize);
walk             1084 drivers/crypto/n2_core.c 				       rctx->walk.blocksize);
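
A minimal sketch of the legacy ablkcipher_walk pattern used by drivers/crypto/n2_core.c above: the walk hands back physical page/offset pairs, which suits hardware that consumes physical addresses. The per-chunk length clamp is simplified here and my_queue_chunk() is a hypothetical stand-in for submitting one chunk to the engine:

#include <linux/kernel.h>
#include <linux/io.h>
#include <crypto/algapi.h>

/* hypothetical: queue one physically-addressed chunk to the engine */
static void my_queue_chunk(unsigned long src, unsigned long dst,
			   unsigned int len);

static int my_ablkcipher_phys_walk(struct ablkcipher_request *req)
{
	struct ablkcipher_walk walk;
	unsigned int nbytes;
	int err;

	ablkcipher_walk_init(&walk, req->dst, req->src, req->nbytes);
	err = ablkcipher_walk_phys(req, &walk);
	if (err)
		return err;

	while ((nbytes = walk.nbytes) != 0) {
		unsigned long src_paddr = page_to_phys(walk.src.page) +
					  walk.src.offset;
		unsigned long dst_paddr = page_to_phys(walk.dst.page) +
					  walk.dst.offset;
		unsigned int this_len = min_t(unsigned int, nbytes, PAGE_SIZE);

		my_queue_chunk(src_paddr, dst_paddr, this_len);

		err = ablkcipher_walk_done(req, &walk, nbytes - this_len);
		if (err)
			break;
	}

	ablkcipher_walk_complete(&walk);
	return err;
}
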
walk              106 drivers/crypto/nx/nx-aes-gcm.c 	struct scatter_walk walk;
walk              113 drivers/crypto/nx/nx-aes-gcm.c 		scatterwalk_start(&walk, req->src);
walk              114 drivers/crypto/nx/nx-aes-gcm.c 		scatterwalk_copychunks(out, &walk, nbytes, SCATTERWALK_FROM_SG);
walk              115 drivers/crypto/nx/nx-aes-gcm.c 		scatterwalk_done(&walk, SCATTERWALK_FROM_SG, 0);
walk              154 drivers/crypto/nx/nx.c 	struct scatter_walk walk;
walk              161 drivers/crypto/nx/nx.c 		scatterwalk_start(&walk, sg_src);
walk              172 drivers/crypto/nx/nx.c 	scatterwalk_advance(&walk, start - offset);
walk              175 drivers/crypto/nx/nx.c 		n = scatterwalk_clamp(&walk, len);
walk              179 drivers/crypto/nx/nx.c 			scatterwalk_start(&walk, sg_next(walk.sg));
walk              180 drivers/crypto/nx/nx.c 			n = scatterwalk_clamp(&walk, len);
walk              182 drivers/crypto/nx/nx.c 		dst = scatterwalk_map(&walk);
walk              188 drivers/crypto/nx/nx.c 		scatterwalk_advance(&walk, n);
walk              189 drivers/crypto/nx/nx.c 		scatterwalk_done(&walk, SCATTERWALK_FROM_SG, len);
walk              346 drivers/crypto/padlock-aes.c 	struct blkcipher_walk walk;
walk              351 drivers/crypto/padlock-aes.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              352 drivers/crypto/padlock-aes.c 	err = blkcipher_walk_virt(desc, &walk);
walk              354 drivers/crypto/padlock-aes.c 	while ((nbytes = walk.nbytes)) {
walk              355 drivers/crypto/padlock-aes.c 		padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
walk              359 drivers/crypto/padlock-aes.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              372 drivers/crypto/padlock-aes.c 	struct blkcipher_walk walk;
walk              377 drivers/crypto/padlock-aes.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              378 drivers/crypto/padlock-aes.c 	err = blkcipher_walk_virt(desc, &walk);
walk              380 drivers/crypto/padlock-aes.c 	while ((nbytes = walk.nbytes)) {
walk              381 drivers/crypto/padlock-aes.c 		padlock_xcrypt_ecb(walk.src.virt.addr, walk.dst.virt.addr,
walk              385 drivers/crypto/padlock-aes.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              419 drivers/crypto/padlock-aes.c 	struct blkcipher_walk walk;
walk              424 drivers/crypto/padlock-aes.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              425 drivers/crypto/padlock-aes.c 	err = blkcipher_walk_virt(desc, &walk);
walk              427 drivers/crypto/padlock-aes.c 	while ((nbytes = walk.nbytes)) {
walk              428 drivers/crypto/padlock-aes.c 		u8 *iv = padlock_xcrypt_cbc(walk.src.virt.addr,
walk              429 drivers/crypto/padlock-aes.c 					    walk.dst.virt.addr, ctx->E,
walk              430 drivers/crypto/padlock-aes.c 					    walk.iv, &ctx->cword.encrypt,
walk              432 drivers/crypto/padlock-aes.c 		memcpy(walk.iv, iv, AES_BLOCK_SIZE);
walk              434 drivers/crypto/padlock-aes.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              447 drivers/crypto/padlock-aes.c 	struct blkcipher_walk walk;
walk              452 drivers/crypto/padlock-aes.c 	blkcipher_walk_init(&walk, dst, src, nbytes);
walk              453 drivers/crypto/padlock-aes.c 	err = blkcipher_walk_virt(desc, &walk);
walk              455 drivers/crypto/padlock-aes.c 	while ((nbytes = walk.nbytes)) {
walk              456 drivers/crypto/padlock-aes.c 		padlock_xcrypt_cbc(walk.src.virt.addr, walk.dst.virt.addr,
walk              457 drivers/crypto/padlock-aes.c 				   ctx->D, walk.iv, &ctx->cword.decrypt,
walk              460 drivers/crypto/padlock-aes.c 		err = blkcipher_walk_done(desc, &walk, nbytes);
walk              469 drivers/crypto/s5p-sss.c 	struct scatter_walk walk;
walk              474 drivers/crypto/s5p-sss.c 	scatterwalk_start(&walk, sg);
walk              475 drivers/crypto/s5p-sss.c 	scatterwalk_copychunks(buf, &walk, nbytes, out);
walk              476 drivers/crypto/s5p-sss.c 	scatterwalk_done(&walk, out, 0);
walk              331 drivers/crypto/stm32/stm32-cryp.c 	struct scatter_walk walk;
walk              336 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_start(&walk, sg);
walk              337 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_advance(&walk, start);
walk              338 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_copychunks(buf, &walk, nbytes, out);
walk              339 drivers/crypto/stm32/stm32-cryp.c 	scatterwalk_done(&walk, out, 0);
walk              890 drivers/crypto/ux500/cryp/cryp_core.c 	struct ablkcipher_walk walk;
walk              905 drivers/crypto/ux500/cryp/cryp_core.c 	ablkcipher_walk_init(&walk, areq->dst, areq->src, areq->nbytes);
walk              906 drivers/crypto/ux500/cryp/cryp_core.c 	ret = ablkcipher_walk_phys(areq, &walk);
walk              914 drivers/crypto/ux500/cryp/cryp_core.c 	while ((nbytes = walk.nbytes) > 0) {
walk              915 drivers/crypto/ux500/cryp/cryp_core.c 		ctx->iv = walk.iv;
walk              916 drivers/crypto/ux500/cryp/cryp_core.c 		src_paddr = (page_to_phys(walk.src.page) + walk.src.offset);
walk              919 drivers/crypto/ux500/cryp/cryp_core.c 		dst_paddr = (page_to_phys(walk.dst.page) + walk.dst.offset);
walk              929 drivers/crypto/ux500/cryp/cryp_core.c 		ret = ablkcipher_walk_done(areq, &walk, nbytes);
walk              933 drivers/crypto/ux500/cryp/cryp_core.c 	ablkcipher_walk_complete(&walk);
walk             1074 drivers/crypto/ux500/hash/hash_core.c 	struct crypto_hash_walk walk;
walk             1075 drivers/crypto/ux500/hash/hash_core.c 	int msg_length = crypto_hash_walk_first(req, &walk);
walk             1098 drivers/crypto/ux500/hash/hash_core.c 		data_buffer = walk.data;
walk             1108 drivers/crypto/ux500/hash/hash_core.c 		msg_length = crypto_hash_walk_done(&walk, 0);
walk               75 drivers/crypto/vmx/aes_cbc.c 	struct skcipher_walk walk;
walk               88 drivers/crypto/vmx/aes_cbc.c 	ret = skcipher_walk_virt(&walk, req, false);
walk               89 drivers/crypto/vmx/aes_cbc.c 	while ((nbytes = walk.nbytes) != 0) {
walk               93 drivers/crypto/vmx/aes_cbc.c 		aes_p8_cbc_encrypt(walk.src.virt.addr,
walk               94 drivers/crypto/vmx/aes_cbc.c 				   walk.dst.virt.addr,
walk               97 drivers/crypto/vmx/aes_cbc.c 				   walk.iv, enc);
walk              102 drivers/crypto/vmx/aes_cbc.c 		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
walk               70 drivers/crypto/vmx/aes_ctr.c 			     struct skcipher_walk *walk)
walk               72 drivers/crypto/vmx/aes_ctr.c 	u8 *ctrblk = walk->iv;
walk               74 drivers/crypto/vmx/aes_ctr.c 	u8 *src = walk->src.virt.addr;
walk               75 drivers/crypto/vmx/aes_ctr.c 	u8 *dst = walk->dst.virt.addr;
walk               76 drivers/crypto/vmx/aes_ctr.c 	unsigned int nbytes = walk->nbytes;
walk               94 drivers/crypto/vmx/aes_ctr.c 	struct skcipher_walk walk;
walk              106 drivers/crypto/vmx/aes_ctr.c 	ret = skcipher_walk_virt(&walk, req, false);
walk              107 drivers/crypto/vmx/aes_ctr.c 	while ((nbytes = walk.nbytes) >= AES_BLOCK_SIZE) {
walk              111 drivers/crypto/vmx/aes_ctr.c 		aes_p8_ctr32_encrypt_blocks(walk.src.virt.addr,
walk              112 drivers/crypto/vmx/aes_ctr.c 					    walk.dst.virt.addr,
walk              114 drivers/crypto/vmx/aes_ctr.c 					    &ctx->enc_key, walk.iv);
walk              120 drivers/crypto/vmx/aes_ctr.c 			crypto_inc(walk.iv, AES_BLOCK_SIZE);
walk              123 drivers/crypto/vmx/aes_ctr.c 		ret = skcipher_walk_done(&walk, nbytes);
walk              126 drivers/crypto/vmx/aes_ctr.c 		p8_aes_ctr_final(ctx, &walk);
walk              127 drivers/crypto/vmx/aes_ctr.c 		ret = skcipher_walk_done(&walk, 0);
walk               82 drivers/crypto/vmx/aes_xts.c 	struct skcipher_walk walk;
walk               99 drivers/crypto/vmx/aes_xts.c 	ret = skcipher_walk_virt(&walk, req, false);
walk              107 drivers/crypto/vmx/aes_xts.c 	aes_p8_encrypt(walk.iv, tweak, &ctx->tweak_key);
walk              113 drivers/crypto/vmx/aes_xts.c 	while ((nbytes = walk.nbytes) != 0) {
walk              118 drivers/crypto/vmx/aes_xts.c 			aes_p8_xts_encrypt(walk.src.virt.addr,
walk              119 drivers/crypto/vmx/aes_xts.c 					   walk.dst.virt.addr,
walk              123 drivers/crypto/vmx/aes_xts.c 			aes_p8_xts_decrypt(walk.src.virt.addr,
walk              124 drivers/crypto/vmx/aes_xts.c 					   walk.dst.virt.addr,
walk              131 drivers/crypto/vmx/aes_xts.c 		ret = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
walk              662 drivers/net/ethernet/sun/sungem.c 			int walk = entry;
walk              667 drivers/net/ethernet/sun/sungem.c 				walk = NEXT_TX(walk);
walk              668 drivers/net/ethernet/sun/sungem.c 				if (walk == limit)
walk              670 drivers/net/ethernet/sun/sungem.c 				if (walk == last)
walk              632 drivers/vfio/pci/vfio_pci.c 	struct vfio_pci_walk_info *walk = data;
walk              634 drivers/vfio/pci/vfio_pci.c 	if (!walk->slot || vfio_pci_dev_below_slot(pdev, walk->pdev->slot))
walk              635 drivers/vfio/pci/vfio_pci.c 		walk->ret = walk->fn(pdev, walk->data);
walk              637 drivers/vfio/pci/vfio_pci.c 	return walk->ret;
walk              645 drivers/vfio/pci/vfio_pci.c 	struct vfio_pci_walk_info walk = {
walk              649 drivers/vfio/pci/vfio_pci.c 	pci_walk_bus(pdev->bus, vfio_pci_walk_wrapper, &walk);
walk              651 drivers/vfio/pci/vfio_pci.c 	return walk.ret;
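
A minimal sketch of the pci_walk_bus() callback pattern used in the vfio_pci.c entries above: per-walk state travels in a small struct passed as the opaque data pointer, and a non-zero return from the wrapper stops the bus walk early. All my_*-prefixed names are hypothetical.

#include <linux/pci.h>

struct my_walk_info {
	int (*fn)(struct pci_dev *pdev, void *data);	/* per-device callback */
	void *data;
	int ret;
};

static int my_walk_wrapper(struct pci_dev *pdev, void *data)
{
	struct my_walk_info *walk = data;

	walk->ret = walk->fn(pdev, walk->data);
	return walk->ret;		/* non-zero terminates pci_walk_bus() */
}

static int my_walk_bus_devices(struct pci_dev *pdev,
			       int (*fn)(struct pci_dev *, void *),
			       void *data)
{
	struct my_walk_info walk = { .fn = fn, .data = data, .ret = 0 };

	pci_walk_bus(pdev->bus, my_walk_wrapper, &walk);
	return walk.ret;
}
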
walk              471 fs/fat/inode.c 	unsigned char exe_extensions[] = "EXECOMBAT", *walk;
walk              473 fs/fat/inode.c 	for (walk = exe_extensions; *walk; walk += 3)
walk              474 fs/fat/inode.c 		if (!strncmp(extension, walk, 3))
walk               28 fs/fat/namei_msdos.c 	unsigned char *walk;
walk               45 fs/fat/namei_msdos.c 	for (walk = res; len && walk - res < 8; walk++) {
walk               64 fs/fat/namei_msdos.c 		if ((res == walk) && (c == 0xE5))
walk               69 fs/fat/namei_msdos.c 		*walk = (!opts->nocase && c >= 'a' && c <= 'z') ? c - 32 : c;
walk               82 fs/fat/namei_msdos.c 		while (walk - res < 8)
walk               83 fs/fat/namei_msdos.c 			*walk++ = ' ';
walk               84 fs/fat/namei_msdos.c 		while (len > 0 && walk - res < MSDOS_NAME) {
walk              103 fs/fat/namei_msdos.c 				*walk++ = c - 32;
walk              105 fs/fat/namei_msdos.c 				*walk++ = c;
walk              112 fs/fat/namei_msdos.c 	while (walk - res < MSDOS_NAME)
walk              113 fs/fat/namei_msdos.c 		*walk++ = ' ';
walk              508 fs/proc/task_mmu.c 		struct mm_walk *walk)
walk              510 fs/proc/task_mmu.c 	struct mem_size_stats *mss = walk->private;
walk              513 fs/proc/task_mmu.c 			walk->vma->vm_file->f_mapping, addr, end);
walk              522 fs/proc/task_mmu.c 		struct mm_walk *walk)
walk              524 fs/proc/task_mmu.c 	struct mem_size_stats *mss = walk->private;
walk              525 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk              574 fs/proc/task_mmu.c 		struct mm_walk *walk)
walk              576 fs/proc/task_mmu.c 	struct mem_size_stats *mss = walk->private;
walk              577 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk              597 fs/proc/task_mmu.c 		struct mm_walk *walk)
walk              603 fs/proc/task_mmu.c 			   struct mm_walk *walk)
walk              605 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk              612 fs/proc/task_mmu.c 			smaps_pmd_entry(pmd, addr, walk);
walk              626 fs/proc/task_mmu.c 		smaps_pte_entry(pte, addr, walk);
walk              709 fs/proc/task_mmu.c 				 struct mm_walk *walk)
walk              711 fs/proc/task_mmu.c 	struct mem_size_stats *mss = walk->private;
walk              712 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1049 fs/proc/task_mmu.c 				unsigned long end, struct mm_walk *walk)
walk             1051 fs/proc/task_mmu.c 	struct clear_refs_private *cp = walk->private;
walk             1052 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1108 fs/proc/task_mmu.c 				struct mm_walk *walk)
walk             1110 fs/proc/task_mmu.c 	struct clear_refs_private *cp = walk->private;
walk             1111 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1285 fs/proc/task_mmu.c 				struct mm_walk *walk)
walk             1287 fs/proc/task_mmu.c 	struct pagemapread *pm = walk->private;
walk             1292 fs/proc/task_mmu.c 		struct vm_area_struct *vma = find_vma(walk->mm, addr);
walk             1364 fs/proc/task_mmu.c 			     struct mm_walk *walk)
walk             1366 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1367 fs/proc/task_mmu.c 	struct pagemapread *pm = walk->private;
walk             1439 fs/proc/task_mmu.c 	orig_pte = pte = pte_offset_map_lock(walk->mm, pmdp, addr, &ptl);
walk             1459 fs/proc/task_mmu.c 				 struct mm_walk *walk)
walk             1461 fs/proc/task_mmu.c 	struct pagemapread *pm = walk->private;
walk             1462 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1744 fs/proc/task_mmu.c 		unsigned long end, struct mm_walk *walk)
walk             1746 fs/proc/task_mmu.c 	struct numa_maps *md = walk->private;
walk             1747 fs/proc/task_mmu.c 	struct vm_area_struct *vma = walk->vma;
walk             1768 fs/proc/task_mmu.c 	orig_pte = pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
walk             1782 fs/proc/task_mmu.c 		unsigned long addr, unsigned long end, struct mm_walk *walk)
walk             1795 fs/proc/task_mmu.c 	md = walk->private;
walk             1802 fs/proc/task_mmu.c 		unsigned long addr, unsigned long end, struct mm_walk *walk)
walk              891 fs/select.c    		struct poll_list *walk;
walk              894 fs/select.c    		for (walk = list; walk != NULL; walk = walk->next) {
walk              897 fs/select.c    			pfd = walk->entries;
walk              898 fs/select.c    			pfd_end = pfd + walk->len;
walk              970 fs/select.c     	struct poll_list *walk = head;
walk              978 fs/select.c    		walk->next = NULL;
walk              979 fs/select.c    		walk->len = len;
walk              983 fs/select.c    		if (copy_from_user(walk->entries, ufds + nfds-todo,
walk              984 fs/select.c    					sizeof(struct pollfd) * walk->len))
walk              987 fs/select.c    		todo -= walk->len;
walk              992 fs/select.c    		walk = walk->next = kmalloc(struct_size(walk, entries, len),
walk              994 fs/select.c    		if (!walk) {
walk             1004 fs/select.c    	for (walk = head; walk; walk = walk->next) {
walk             1005 fs/select.c    		struct pollfd *fds = walk->entries;
walk             1008 fs/select.c    		for (j = 0; j < walk->len; j++, ufds++)
walk             1015 fs/select.c    	walk = head->next;
walk             1016 fs/select.c    	while (walk) {
walk             1017 fs/select.c    		struct poll_list *pos = walk;
walk             1018 fs/select.c    		walk = walk->next;
walk              237 include/crypto/algapi.h 			struct blkcipher_walk *walk, int err);
walk              239 include/crypto/algapi.h 			struct blkcipher_walk *walk);
walk              241 include/crypto/algapi.h 			struct blkcipher_walk *walk);
walk              243 include/crypto/algapi.h 			      struct blkcipher_walk *walk,
walk              246 include/crypto/algapi.h 				   struct blkcipher_walk *walk,
walk              251 include/crypto/algapi.h 			 struct ablkcipher_walk *walk, int err);
walk              253 include/crypto/algapi.h 			 struct ablkcipher_walk *walk);
walk              254 include/crypto/algapi.h void __ablkcipher_walk_complete(struct ablkcipher_walk *walk);
walk              322 include/crypto/algapi.h static inline void blkcipher_walk_init(struct blkcipher_walk *walk,
walk              327 include/crypto/algapi.h 	walk->in.sg = src;
walk              328 include/crypto/algapi.h 	walk->out.sg = dst;
walk              329 include/crypto/algapi.h 	walk->total = nbytes;
walk              332 include/crypto/algapi.h static inline void ablkcipher_walk_init(struct ablkcipher_walk *walk,
walk              337 include/crypto/algapi.h 	walk->in.sg = src;
walk              338 include/crypto/algapi.h 	walk->out.sg = dst;
walk              339 include/crypto/algapi.h 	walk->total = nbytes;
walk              340 include/crypto/algapi.h 	INIT_LIST_HEAD(&walk->buffers);
walk              343 include/crypto/algapi.h static inline void ablkcipher_walk_complete(struct ablkcipher_walk *walk)
walk              345 include/crypto/algapi.h 	if (unlikely(!list_empty(&walk->buffers)))
walk              346 include/crypto/algapi.h 		__ablkcipher_walk_complete(walk);
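
A minimal sketch of the legacy ablkcipher_walk API declared above, in the style of the n2_core.c and ux500 cryp_core.c entries: the request is walked in physical-page chunks and each chunk is handed to a hypothetical hardware helper. Error handling is abbreviated and all my_*-prefixed names are assumptions.

#include <crypto/algapi.h>

/* hypothetical: program one contiguous chunk into the crypto engine */
static void my_hw_crypt(phys_addr_t src, phys_addr_t dst,
			unsigned int len, u8 *iv);

static int my_ablkcipher_encrypt(struct ablkcipher_request *req)
{
	struct ablkcipher_walk walk;
	unsigned int nbytes;
	int err;

	ablkcipher_walk_init(&walk, req->dst, req->src, req->nbytes);
	err = ablkcipher_walk_phys(req, &walk);

	while (!err && (nbytes = walk.nbytes) > 0) {
		phys_addr_t src = page_to_phys(walk.src.page) + walk.src.offset;
		phys_addr_t dst = page_to_phys(walk.dst.page) + walk.dst.offset;

		my_hw_crypt(src, dst, nbytes, walk.iv);

		/* third argument is the number of bytes left unprocessed */
		err = ablkcipher_walk_done(req, &walk, 0);
	}

	ablkcipher_walk_complete(&walk);
	return err;
}
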
walk               16 include/crypto/cbc.h 	struct skcipher_walk *walk, struct crypto_skcipher *tfm,
walk               20 include/crypto/cbc.h 	unsigned int nbytes = walk->nbytes;
walk               21 include/crypto/cbc.h 	u8 *src = walk->src.virt.addr;
walk               22 include/crypto/cbc.h 	u8 *dst = walk->dst.virt.addr;
walk               23 include/crypto/cbc.h 	u8 *iv = walk->iv;
walk               38 include/crypto/cbc.h 	struct skcipher_walk *walk, struct crypto_skcipher *tfm,
walk               42 include/crypto/cbc.h 	unsigned int nbytes = walk->nbytes;
walk               43 include/crypto/cbc.h 	u8 *src = walk->src.virt.addr;
walk               44 include/crypto/cbc.h 	u8 *iv = walk->iv;
walk               54 include/crypto/cbc.h 	memcpy(walk->iv, iv, bsize);
walk               64 include/crypto/cbc.h 	struct skcipher_walk walk;
walk               67 include/crypto/cbc.h 	err = skcipher_walk_virt(&walk, req, false);
walk               69 include/crypto/cbc.h 	while (walk.nbytes) {
walk               70 include/crypto/cbc.h 		if (walk.src.virt.addr == walk.dst.virt.addr)
walk               71 include/crypto/cbc.h 			err = crypto_cbc_encrypt_inplace(&walk, tfm, fn);
walk               73 include/crypto/cbc.h 			err = crypto_cbc_encrypt_segment(&walk, tfm, fn);
walk               74 include/crypto/cbc.h 		err = skcipher_walk_done(&walk, err);
walk               81 include/crypto/cbc.h 	struct skcipher_walk *walk, struct crypto_skcipher *tfm,
walk               85 include/crypto/cbc.h 	unsigned int nbytes = walk->nbytes;
walk               86 include/crypto/cbc.h 	u8 *src = walk->src.virt.addr;
walk               87 include/crypto/cbc.h 	u8 *dst = walk->dst.virt.addr;
walk               88 include/crypto/cbc.h 	u8 *iv = walk->iv;
walk               99 include/crypto/cbc.h 	memcpy(walk->iv, iv, bsize);
walk              105 include/crypto/cbc.h 	struct skcipher_walk *walk, struct crypto_skcipher *tfm,
walk              109 include/crypto/cbc.h 	unsigned int nbytes = walk->nbytes;
walk              110 include/crypto/cbc.h 	u8 *src = walk->src.virt.addr;
walk              125 include/crypto/cbc.h 	crypto_xor(src, walk->iv, bsize);
walk              126 include/crypto/cbc.h 	memcpy(walk->iv, last_iv, bsize);
walk              132 include/crypto/cbc.h 	struct skcipher_walk *walk, struct crypto_skcipher *tfm,
walk              135 include/crypto/cbc.h 	if (walk->src.virt.addr == walk->dst.virt.addr)
walk              136 include/crypto/cbc.h 		return crypto_cbc_decrypt_inplace(walk, tfm, fn);
walk              138 include/crypto/cbc.h 		return crypto_cbc_decrypt_segment(walk, tfm, fn);
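
A minimal sketch of how a cipher driver would plug a single-block encrypt routine into the CBC walking template above (assuming the helper whose walk loop appears above is crypto_cbc_encrypt_walk(), as in mainline). The my_* context and block routine are hypothetical.

#include <crypto/cbc.h>
#include <crypto/internal/skcipher.h>

struct my_aes_ctx { u32 round_keys[60]; };	/* hypothetical expanded key */

/* hypothetical single-block primitive */
static void my_aes_block_encrypt(struct my_aes_ctx *ctx, u8 *dst, const u8 *src);

static void my_aes_encrypt_one(struct crypto_skcipher *tfm,
			       const u8 *src, u8 *dst)
{
	my_aes_block_encrypt(crypto_skcipher_ctx(tfm), dst, src);
}

static int my_cbc_encrypt(struct skcipher_request *req)
{
	/* the template walks the request and chains the IV block by block */
	return crypto_cbc_encrypt_walk(req, my_aes_encrypt_one);
}
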
walk               27 include/crypto/ctr.h 	struct skcipher_walk walk;
walk               34 include/crypto/ctr.h 	err = skcipher_walk_virt(&walk, req, false);
walk               36 include/crypto/ctr.h 	while (walk.nbytes > 0) {
walk               37 include/crypto/ctr.h 		u8 *dst = walk.dst.virt.addr;
walk               38 include/crypto/ctr.h 		u8 *src = walk.src.virt.addr;
walk               39 include/crypto/ctr.h 		int nbytes = walk.nbytes;
walk               42 include/crypto/ctr.h 		if (nbytes < walk.total) {
walk               43 include/crypto/ctr.h 			tail = walk.nbytes & (blocksize - 1);
walk               50 include/crypto/ctr.h 			fn(tfm, walk.iv, buf);
walk               53 include/crypto/ctr.h 			crypto_inc(walk.iv, blocksize);
walk               60 include/crypto/ctr.h 		err = skcipher_walk_done(&walk, tail);
walk               50 include/crypto/internal/hash.h int crypto_hash_walk_done(struct crypto_hash_walk *walk, int err);
walk               52 include/crypto/internal/hash.h 			   struct crypto_hash_walk *walk);
walk               54 include/crypto/internal/hash.h 			   struct crypto_hash_walk *walk);
walk               56 include/crypto/internal/hash.h static inline int crypto_ahash_walk_done(struct crypto_hash_walk *walk,
walk               59 include/crypto/internal/hash.h 	return crypto_hash_walk_done(walk, err);
walk               62 include/crypto/internal/hash.h static inline int crypto_hash_walk_last(struct crypto_hash_walk *walk)
walk               64 include/crypto/internal/hash.h 	return !(walk->entrylen | walk->total);
walk               67 include/crypto/internal/hash.h static inline int crypto_ahash_walk_last(struct crypto_hash_walk *walk)
walk               69 include/crypto/internal/hash.h 	return crypto_hash_walk_last(walk);
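
A minimal sketch of the crypto_hash_walk API declared above, following the pattern in the n2_core.c and ux500 hash_core.c entries: each iteration yields a virtually-mapped chunk in walk.data until crypto_hash_walk_done() returns zero (or a negative error). The my_* context and helper are hypothetical.

#include <crypto/internal/hash.h>

struct my_hash_ctx { u32 state[8]; };		/* hypothetical per-request state */

/* hypothetical: absorb one linear chunk into the running digest */
static void my_hash_process(struct my_hash_ctx *ctx, const u8 *data,
			    unsigned int len);

static int my_ahash_update(struct ahash_request *req)
{
	struct my_hash_ctx *ctx = ahash_request_ctx(req);
	struct crypto_hash_walk walk;
	int nbytes;

	for (nbytes = crypto_hash_walk_first(req, &walk); nbytes > 0;
	     nbytes = crypto_hash_walk_done(&walk, 0))
		my_hash_process(ctx, walk.data, nbytes);

	return nbytes;		/* 0 on success, negative on error */
}
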
walk              136 include/crypto/internal/skcipher.h int skcipher_walk_done(struct skcipher_walk *walk, int err);
walk              137 include/crypto/internal/skcipher.h int skcipher_walk_virt(struct skcipher_walk *walk,
walk              140 include/crypto/internal/skcipher.h void skcipher_walk_atomise(struct skcipher_walk *walk);
walk              141 include/crypto/internal/skcipher.h int skcipher_walk_async(struct skcipher_walk *walk,
walk              143 include/crypto/internal/skcipher.h int skcipher_walk_aead(struct skcipher_walk *walk, struct aead_request *req,
walk              145 include/crypto/internal/skcipher.h int skcipher_walk_aead_encrypt(struct skcipher_walk *walk,
walk              147 include/crypto/internal/skcipher.h int skcipher_walk_aead_decrypt(struct skcipher_walk *walk,
walk              149 include/crypto/internal/skcipher.h void skcipher_walk_complete(struct skcipher_walk *walk, int err);
walk              151 include/crypto/internal/skcipher.h static inline void skcipher_walk_abort(struct skcipher_walk *walk)
walk              153 include/crypto/internal/skcipher.h 	skcipher_walk_done(walk, -ECANCELED);
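
A minimal sketch of the skcipher_walk API declared above, mirroring the loop in the vmx/aes_cbc.c entries earlier in this listing: encrypt the whole blocks of each mapped chunk and hand the partial remainder back to skcipher_walk_done(). The my_* context and block helper are assumptions.

#include <crypto/aes.h>
#include <crypto/internal/skcipher.h>

struct my_ctx { u32 round_keys[60]; };		/* hypothetical expanded key */

/* hypothetical: CBC-encrypt 'len' bytes (a multiple of the block size) */
static void my_cbc_blocks(struct my_ctx *ctx, u8 *dst, const u8 *src,
			  unsigned int len, u8 *iv);

static int my_skcipher_encrypt(struct skcipher_request *req)
{
	struct crypto_skcipher *tfm = crypto_skcipher_reqtfm(req);
	struct my_ctx *ctx = crypto_skcipher_ctx(tfm);
	struct skcipher_walk walk;
	unsigned int nbytes;
	int err;

	err = skcipher_walk_virt(&walk, req, false);
	while ((nbytes = walk.nbytes) != 0) {
		my_cbc_blocks(ctx, walk.dst.virt.addr, walk.src.virt.addr,
			      round_down(nbytes, AES_BLOCK_SIZE), walk.iv);
		err = skcipher_walk_done(&walk, nbytes % AES_BLOCK_SIZE);
	}
	return err;
}
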
walk               28 include/crypto/scatterwalk.h static inline unsigned int scatterwalk_pagelen(struct scatter_walk *walk)
walk               30 include/crypto/scatterwalk.h 	unsigned int len = walk->sg->offset + walk->sg->length - walk->offset;
walk               31 include/crypto/scatterwalk.h 	unsigned int len_this_page = offset_in_page(~walk->offset) + 1;
walk               35 include/crypto/scatterwalk.h static inline unsigned int scatterwalk_clamp(struct scatter_walk *walk,
walk               38 include/crypto/scatterwalk.h 	unsigned int len_this_page = scatterwalk_pagelen(walk);
walk               42 include/crypto/scatterwalk.h static inline void scatterwalk_advance(struct scatter_walk *walk,
walk               45 include/crypto/scatterwalk.h 	walk->offset += nbytes;
walk               48 include/crypto/scatterwalk.h static inline unsigned int scatterwalk_aligned(struct scatter_walk *walk,
walk               51 include/crypto/scatterwalk.h 	return !(walk->offset & alignmask);
walk               54 include/crypto/scatterwalk.h static inline struct page *scatterwalk_page(struct scatter_walk *walk)
walk               56 include/crypto/scatterwalk.h 	return sg_page(walk->sg) + (walk->offset >> PAGE_SHIFT);
walk               64 include/crypto/scatterwalk.h static inline void scatterwalk_start(struct scatter_walk *walk,
walk               67 include/crypto/scatterwalk.h 	walk->sg = sg;
walk               68 include/crypto/scatterwalk.h 	walk->offset = sg->offset;
walk               71 include/crypto/scatterwalk.h static inline void *scatterwalk_map(struct scatter_walk *walk)
walk               73 include/crypto/scatterwalk.h 	return kmap_atomic(scatterwalk_page(walk)) +
walk               74 include/crypto/scatterwalk.h 	       offset_in_page(walk->offset);
walk               77 include/crypto/scatterwalk.h static inline void scatterwalk_pagedone(struct scatter_walk *walk, int out,
walk               83 include/crypto/scatterwalk.h 		page = sg_page(walk->sg) + ((walk->offset - 1) >> PAGE_SHIFT);
walk               92 include/crypto/scatterwalk.h 	if (more && walk->offset >= walk->sg->offset + walk->sg->length)
walk               93 include/crypto/scatterwalk.h 		scatterwalk_start(walk, sg_next(walk->sg));
walk               96 include/crypto/scatterwalk.h static inline void scatterwalk_done(struct scatter_walk *walk, int out,
walk               99 include/crypto/scatterwalk.h 	if (!more || walk->offset >= walk->sg->offset + walk->sg->length ||
walk              100 include/crypto/scatterwalk.h 	    !(walk->offset & (PAGE_SIZE - 1)))
walk              101 include/crypto/scatterwalk.h 		scatterwalk_pagedone(walk, out, more);
walk              104 include/crypto/scatterwalk.h void scatterwalk_copychunks(void *buf, struct scatter_walk *walk,
walk              106 include/crypto/scatterwalk.h void *scatterwalk_map(struct scatter_walk *walk);
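
A minimal sketch tying together the scatter_walk helpers above, essentially the pattern in the stm32-cryp.c and s5p-sss.c entries: copy nbytes between a linear buffer and a scatterlist starting at a byte offset, where 'out' selects the copy direction passed to scatterwalk_copychunks() and scatterwalk_done().

#include <crypto/scatterwalk.h>

static void my_sg_copy(void *buf, struct scatterlist *sg,
		       unsigned int start, size_t nbytes, int out)
{
	struct scatter_walk walk;

	scatterwalk_start(&walk, sg);
	scatterwalk_advance(&walk, start);	/* skip to the requested offset */
	scatterwalk_copychunks(buf, &walk, nbytes, out);
	scatterwalk_done(&walk, out, 0);
}
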
walk               30 include/linux/pagewalk.h 			 unsigned long next, struct mm_walk *walk);
walk               32 include/linux/pagewalk.h 			 unsigned long next, struct mm_walk *walk);
walk               34 include/linux/pagewalk.h 			 unsigned long next, struct mm_walk *walk);
walk               36 include/linux/pagewalk.h 			struct mm_walk *walk);
walk               39 include/linux/pagewalk.h 			     struct mm_walk *walk);
walk               41 include/linux/pagewalk.h 			struct mm_walk *walk);
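
A minimal sketch of the mm_walk_ops callback interface declared above, in the spirit of the mm/mincore.c and fs/proc/task_mmu.c entries: per-walk state travels in walk->private and a pte_entry callback runs for each PTE in the range. The mmap_sem locking matches this kernel; THP and hole handling are left to the core walker. All my_* names are hypothetical.

#include <linux/pagewalk.h>
#include <linux/mm.h>

struct my_counts {
	unsigned long present;		/* number of present PTEs seen */
};

static int my_pte_entry(pte_t *pte, unsigned long addr,
			unsigned long next, struct mm_walk *walk)
{
	struct my_counts *counts = walk->private;

	if (pte_present(*pte))
		counts->present++;
	return 0;			/* non-zero would abort the walk */
}

static const struct mm_walk_ops my_walk_ops = {
	.pte_entry	= my_pte_entry,
};

static unsigned long my_count_present(struct mm_struct *mm,
				      unsigned long start, unsigned long end)
{
	struct my_counts counts = { 0 };

	down_read(&mm->mmap_sem);	/* walk_page_range() asserts this */
	walk_page_range(mm, start, end, &my_walk_ops, &counts);
	up_read(&mm->mmap_sem);

	return counts.present;
}
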
walk               99 include/net/act_api.h 	int     (*walk)(struct net *, struct sk_buff *,
walk              353 include/net/netfilter/nf_tables.h 	void				(*walk)(const struct nft_ctx *ctx,
walk              211 include/net/sch_generic.h 	void			(*walk)(struct Qdisc *, struct qdisc_walker * arg);
walk              312 include/net/sch_generic.h 	void			(*walk)(struct tcf_proto *tp,
walk              488 include/net/xfrm.h 	struct xfrm_policy_walk_entry walk;
walk              518 include/net/xfrm.h 	struct xfrm_policy_walk_entry walk;
walk             1471 include/net/xfrm.h void xfrm_state_walk_init(struct xfrm_state_walk *walk, u8 proto,
walk             1473 include/net/xfrm.h int xfrm_state_walk(struct net *net, struct xfrm_state_walk *walk,
walk             1475 include/net/xfrm.h void xfrm_state_walk_done(struct xfrm_state_walk *walk, struct net *net);
walk             1632 include/net/xfrm.h void xfrm_policy_walk_init(struct xfrm_policy_walk *walk, u8 type);
walk             1633 include/net/xfrm.h int xfrm_policy_walk(struct net *net, struct xfrm_policy_walk *walk,
walk             1636 include/net/xfrm.h void xfrm_policy_walk_done(struct xfrm_policy_walk *walk, struct net *net);
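
A minimal sketch of the resumable xfrm_policy_walk API declared above, following the net/key/af_key.c entries: initialise a walker, iterate all policies with a per-entry callback, then release the walker. my_count_policy() and my_count_policies() are hypothetical.

#include <net/xfrm.h>

static int my_count_policy(struct xfrm_policy *xp, int dir, int count,
			   void *ptr)
{
	(*(unsigned int *)ptr)++;
	return 0;			/* non-zero stops the walk early */
}

static unsigned int my_count_policies(struct net *net)
{
	struct xfrm_policy_walk walk;
	unsigned int n = 0;

	xfrm_policy_walk_init(&walk, XFRM_POLICY_TYPE_ANY);
	xfrm_policy_walk(net, &walk, my_count_policy, &n);
	xfrm_policy_walk_done(&walk, net);

	return n;
}
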
walk              624 ipc/mqueue.c   	struct ext_wait_queue *walk;
walk              626 ipc/mqueue.c   	list_for_each_entry(walk, &info->e_wait_q[sr].list, list) {
walk              627 ipc/mqueue.c   		if (walk->task->prio <= current->prio) {
walk              628 ipc/mqueue.c   			list_add_tail(&ewp->list, &walk->list);
walk               29 kernel/locking/rtmutex-debug.h 						  enum rtmutex_chainwalk walk)
walk               32 kernel/locking/rtmutex.h 						  enum rtmutex_chainwalk walk)
walk               34 kernel/locking/rtmutex.h 	return walk == RT_MUTEX_FULL_CHAINWALK;
walk              223 mm/hmm.c       static int hmm_vma_do_fault(struct mm_walk *walk, unsigned long addr,
walk              227 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              229 mm/hmm.c       	struct vm_area_struct *vma = walk->vma;
walk              257 mm/hmm.c       			struct mm_walk *walk)
walk              259 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              285 mm/hmm.c       			      struct mm_walk *walk)
walk              287 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              295 mm/hmm.c       	if (write_fault && walk->vma && !(walk->vma->vm_flags & VM_WRITE))
walk              303 mm/hmm.c       			ret = hmm_vma_do_fault(walk, addr, write_fault,
walk              379 mm/hmm.c       			     struct mm_walk *walk)
walk              381 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              392 mm/hmm.c       	return hmm_vma_walk_hole_(addr, end, fault, write_fault, walk);
walk              405 mm/hmm.c       static int hmm_vma_handle_pmd(struct mm_walk *walk, unsigned long addr,
walk              408 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              420 mm/hmm.c       		return hmm_vma_walk_hole_(addr, end, fault, write_fault, walk);
walk              441 mm/hmm.c       int hmm_vma_handle_pmd(struct mm_walk *walk, unsigned long addr,
walk              454 mm/hmm.c       static int hmm_vma_handle_pte(struct mm_walk *walk, unsigned long addr,
walk              458 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              511 mm/hmm.c       				migration_entry_wait(walk->mm, pmdp, addr);
walk              549 mm/hmm.c       	return hmm_vma_walk_hole_(addr, end, fault, write_fault, walk);
walk              555 mm/hmm.c       			    struct mm_walk *walk)
walk              557 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              567 mm/hmm.c       		return hmm_vma_walk_hole(start, end, walk);
walk              582 mm/hmm.c       			pmd_migration_entry_wait(walk->mm, pmdp);
walk              587 mm/hmm.c       		return hmm_pfns_bad(start, end, walk);
walk              605 mm/hmm.c       		return hmm_vma_handle_pmd(walk, addr, end, &pfns[i], pmd);
walk              615 mm/hmm.c       		return hmm_pfns_bad(start, end, walk);
walk              622 mm/hmm.c       		r = hmm_vma_handle_pte(walk, addr, end, pmdp, ptep, &pfns[i]);
walk              657 mm/hmm.c       		struct mm_walk *walk)
walk              659 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              669 mm/hmm.c       		return hmm_vma_walk_hole(start, end, walk);
walk              677 mm/hmm.c       			return hmm_vma_walk_hole(start, end, walk);
walk              688 mm/hmm.c       						write_fault, walk);
walk              707 mm/hmm.c       	split_huge_pud(walk->vma, pudp, addr);
walk              714 mm/hmm.c       		ret = hmm_vma_walk_pmd(pmdp, addr, next, walk);
walk              728 mm/hmm.c       				      struct mm_walk *walk)
walk              731 mm/hmm.c       	struct hmm_vma_walk *hmm_vma_walk = walk->private;
walk              733 mm/hmm.c       	struct vm_area_struct *vma = walk->vma;
walk              740 mm/hmm.c       	ptl = huge_pte_lock(hstate_vma(vma), walk->mm, pte);
walk              765 mm/hmm.c       		return hmm_vma_walk_hole_(addr, end, fault, write_fault, walk);
walk              184 mm/madvise.c   	unsigned long end, struct mm_walk *walk)
walk              187 mm/madvise.c   	struct vm_area_struct *vma = walk->private;
walk              302 mm/madvise.c   				struct mm_walk *walk)
walk              304 mm/madvise.c   	struct madvise_walk_private *private = walk->private;
walk              308 mm/madvise.c   	struct vm_area_struct *vma = walk->vma;
walk              563 mm/madvise.c   				unsigned long end, struct mm_walk *walk)
walk              566 mm/madvise.c   	struct mmu_gather *tlb = walk->private;
walk              568 mm/madvise.c   	struct vm_area_struct *vma = walk->vma;
walk             5679 mm/memcontrol.c 					struct mm_walk *walk)
walk             5681 mm/memcontrol.c 	struct vm_area_struct *vma = walk->vma;
walk             5878 mm/memcontrol.c 				struct mm_walk *walk)
walk             5881 mm/memcontrol.c 	struct vm_area_struct *vma = walk->vma;
walk              442 mm/mempolicy.c 				unsigned long end, struct mm_walk *walk)
walk              446 mm/mempolicy.c 	struct queue_pages *qp = walk->private;
walk              456 mm/mempolicy.c 		__split_huge_pmd(walk->vma, pmd, addr, false, NULL);
walk              466 mm/mempolicy.c 		if (!vma_migratable(walk->vma) ||
walk              491 mm/mempolicy.c 			unsigned long end, struct mm_walk *walk)
walk              493 mm/mempolicy.c 	struct vm_area_struct *vma = walk->vma;
walk              495 mm/mempolicy.c 	struct queue_pages *qp = walk->private;
walk              504 mm/mempolicy.c 		ret = queue_pages_pmd(pmd, ptl, addr, end, walk);
walk              513 mm/mempolicy.c 	pte = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
walk              556 mm/mempolicy.c 			       struct mm_walk *walk)
walk              559 mm/mempolicy.c 	struct queue_pages *qp = walk->private;
walk              565 mm/mempolicy.c 	ptl = huge_pte_lock(hstate_vma(walk->vma), walk->mm, pte);
walk              614 mm/mempolicy.c 				struct mm_walk *walk)
walk              616 mm/mempolicy.c 	struct vm_area_struct *vma = walk->vma;
walk              617 mm/mempolicy.c 	struct queue_pages *qp = walk->private;
walk             2158 mm/migrate.c   				    struct mm_walk *walk)
walk             2160 mm/migrate.c   	struct migrate_vma *migrate = walk->private;
walk             2175 mm/migrate.c   				    struct mm_walk *walk)
walk             2177 mm/migrate.c   	struct migrate_vma *migrate = walk->private;
walk             2191 mm/migrate.c   				   struct mm_walk *walk)
walk             2193 mm/migrate.c   	struct migrate_vma *migrate = walk->private;
walk             2194 mm/migrate.c   	struct vm_area_struct *vma = walk->vma;
walk             2202 mm/migrate.c   		return migrate_vma_collect_hole(start, end, walk);
walk             2219 mm/migrate.c   								walk);
walk             2227 mm/migrate.c   								walk);
walk             2233 mm/migrate.c   								walk);
walk             2236 mm/migrate.c   								walk);
walk             2241 mm/migrate.c   		return migrate_vma_collect_skip(start, end, walk);
walk             2347 mm/migrate.c   		flush_tlb_range(walk->vma, start, end);
walk               25 mm/mincore.c   			unsigned long end, struct mm_walk *walk)
walk               29 mm/mincore.c   	unsigned char *vec = walk->private;
walk               38 mm/mincore.c   	walk->private = vec;
walk              115 mm/mincore.c   				   struct mm_walk *walk)
walk              117 mm/mincore.c   	walk->private += __mincore_unmapped_range(addr, end,
walk              118 mm/mincore.c   						  walk->vma, walk->private);
walk              123 mm/mincore.c   			struct mm_walk *walk)
walk              126 mm/mincore.c   	struct vm_area_struct *vma = walk->vma;
walk              128 mm/mincore.c   	unsigned char *vec = walk->private;
walk              143 mm/mincore.c   	ptep = pte_offset_map_lock(walk->mm, pmd, addr, &ptl);
walk              175 mm/mincore.c   	walk->private += nr;
walk              346 mm/mprotect.c  			       unsigned long next, struct mm_walk *walk)
walk              348 mm/mprotect.c  	return pfn_modify_allowed(pte_pfn(*pte), *(pgprot_t *)(walk->private)) ?
walk              354 mm/mprotect.c  				   struct mm_walk *walk)
walk              356 mm/mprotect.c  	return pfn_modify_allowed(pte_pfn(*pte), *(pgprot_t *)(walk->private)) ?
walk              361 mm/mprotect.c  			  struct mm_walk *walk)
walk                8 mm/pagewalk.c  			  struct mm_walk *walk)
walk               12 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk               16 mm/pagewalk.c  		err = ops->pte_entry(pte, addr, addr + PAGE_SIZE, walk);
walk               30 mm/pagewalk.c  			  struct mm_walk *walk)
walk               34 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk               41 mm/pagewalk.c  		if (pmd_none(*pmd) || !walk->vma) {
walk               43 mm/pagewalk.c  				err = ops->pte_hole(addr, next, walk);
walk               53 mm/pagewalk.c  			err = ops->pmd_entry(pmd, addr, next, walk);
walk               64 mm/pagewalk.c  		split_huge_pmd(walk->vma, pmd, addr);
walk               67 mm/pagewalk.c  		err = walk_pte_range(pmd, addr, next, walk);
walk               76 mm/pagewalk.c  			  struct mm_walk *walk)
walk               80 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk               87 mm/pagewalk.c  		if (pud_none(*pud) || !walk->vma) {
walk               89 mm/pagewalk.c  				err = ops->pte_hole(addr, next, walk);
walk               96 mm/pagewalk.c  			spinlock_t *ptl = pud_trans_huge_lock(pud, walk->vma);
walk               99 mm/pagewalk.c  				err = ops->pud_entry(pud, addr, next, walk);
walk              107 mm/pagewalk.c  		split_huge_pud(walk->vma, pud, addr);
walk              112 mm/pagewalk.c  			err = walk_pmd_range(pud, addr, next, walk);
walk              121 mm/pagewalk.c  			  struct mm_walk *walk)
walk              125 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk              133 mm/pagewalk.c  				err = ops->pte_hole(addr, next, walk);
walk              139 mm/pagewalk.c  			err = walk_pud_range(p4d, addr, next, walk);
walk              148 mm/pagewalk.c  			  struct mm_walk *walk)
walk              152 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk              155 mm/pagewalk.c  	pgd = pgd_offset(walk->mm, addr);
walk              160 mm/pagewalk.c  				err = ops->pte_hole(addr, next, walk);
walk              166 mm/pagewalk.c  			err = walk_p4d_range(pgd, addr, next, walk);
walk              183 mm/pagewalk.c  			      struct mm_walk *walk)
walk              185 mm/pagewalk.c  	struct vm_area_struct *vma = walk->vma;
walk              191 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk              196 mm/pagewalk.c  		pte = huge_pte_offset(walk->mm, addr & hmask, sz);
walk              199 mm/pagewalk.c  			err = ops->hugetlb_entry(pte, hmask, addr, next, walk);
walk              201 mm/pagewalk.c  			err = ops->pte_hole(addr, next, walk);
walk              212 mm/pagewalk.c  			      struct mm_walk *walk)
walk              226 mm/pagewalk.c  			struct mm_walk *walk)
walk              228 mm/pagewalk.c  	struct vm_area_struct *vma = walk->vma;
walk              229 mm/pagewalk.c  	const struct mm_walk_ops *ops = walk->ops;
walk              232 mm/pagewalk.c  		return ops->test_walk(start, end, walk);
walk              245 mm/pagewalk.c  			err = ops->pte_hole(start, end, walk);
walk              252 mm/pagewalk.c  			struct mm_walk *walk)
walk              255 mm/pagewalk.c  	struct vm_area_struct *vma = walk->vma;
walk              258 mm/pagewalk.c  		if (walk->ops->hugetlb_entry)
walk              259 mm/pagewalk.c  			err = walk_hugetlb_range(start, end, walk);
walk              261 mm/pagewalk.c  		err = walk_pgd_range(start, end, walk);
walk              308 mm/pagewalk.c  	struct mm_walk walk = {
walk              317 mm/pagewalk.c  	if (!walk.mm)
walk              320 mm/pagewalk.c  	lockdep_assert_held(&walk.mm->mmap_sem);
walk              322 mm/pagewalk.c  	vma = find_vma(walk.mm, start);
walk              325 mm/pagewalk.c  			walk.vma = NULL;
walk              328 mm/pagewalk.c  			walk.vma = NULL;
walk              331 mm/pagewalk.c  			walk.vma = vma;
walk              335 mm/pagewalk.c  			err = walk_page_test(start, next, &walk);
walk              348 mm/pagewalk.c  		if (walk.vma || walk.ops->pte_hole)
walk              349 mm/pagewalk.c  			err = __walk_page_range(start, next, &walk);
walk              359 mm/pagewalk.c  	struct mm_walk walk = {
walk              367 mm/pagewalk.c  	if (!walk.mm)
walk              370 mm/pagewalk.c  	lockdep_assert_held(&walk.mm->mmap_sem);
walk              372 mm/pagewalk.c  	err = walk_page_test(vma->vm_start, vma->vm_end, &walk);
walk              377 mm/pagewalk.c  	return __walk_page_range(vma->vm_start, vma->vm_end, &walk);
walk               89 net/atm/clip.c 	struct clip_vcc **walk;
walk               97 net/atm/clip.c 	for (walk = &entry->vccs; *walk; walk = &(*walk)->next)
walk               98 net/atm/clip.c 		if (*walk == clip_vcc) {
walk              101 net/atm/clip.c 			*walk = clip_vcc->next;	/* atomic */
walk              317 net/atm/common.c 	struct atm_vcc *walk;
walk              320 net/atm/common.c 		walk = atm_sk(s);
walk              321 net/atm/common.c 		if (walk->dev != vcc->dev)
walk              323 net/atm/common.c 		if (test_bit(ATM_VF_ADDR, &walk->flags) && walk->vpi == vpi &&
walk              324 net/atm/common.c 		    walk->vci == vci && ((walk->qos.txtp.traffic_class !=
walk              326 net/atm/common.c 		    (walk->qos.rxtp.traffic_class != ATM_NONE &&
walk             1811 net/ipv4/tcp_input.c 				goto walk;
walk             1827 net/ipv4/tcp_input.c walk:
walk             1917 net/key/af_key.c 	struct xfrm_policy_walk walk;
walk             1927 net/key/af_key.c 		xfrm_policy_walk_init(&walk, XFRM_POLICY_TYPE_MAIN);
walk             1928 net/key/af_key.c 		rc = xfrm_policy_walk(net, &walk, check_reqid, (void*)&reqid);
walk             1929 net/key/af_key.c 		xfrm_policy_walk_done(&walk, net);
walk             2346 net/key/af_key.c 	xp->walk.dead = 1;
walk             3323 net/key/af_key.c 	xp->walk.dead = 1;
walk             1192 net/l2tp/l2tp_core.c 	struct hlist_node *walk;
walk             1205 net/l2tp/l2tp_core.c 		hlist_for_each_safe(walk, tmp, &tunnel->session_hlist[hash]) {
walk             1206 net/l2tp/l2tp_core.c 			session = hlist_entry(walk, struct l2tp_session, hlist);
walk              125 net/l2tp/l2tp_debugfs.c 	struct hlist_node *walk;
walk              130 net/l2tp/l2tp_debugfs.c 		hlist_for_each_safe(walk, tmp, &tunnel->session_hlist[hash]) {
walk              133 net/l2tp/l2tp_debugfs.c 			session = hlist_entry(walk, struct l2tp_session, hlist);
walk             3872 net/netfilter/nf_tables_api.c 		set->ops->walk(ctx, set, &iter);
walk             4169 net/netfilter/nf_tables_api.c 	set->ops->walk(&dump_ctx->ctx, set, &args.iter);
walk             4947 net/netfilter/nf_tables_api.c 		set->ops->walk(&ctx, set, &iter);
walk             7307 net/netfilter/nf_tables_api.c 			set->ops->walk(ctx, set, &iter);
walk              211 net/netfilter/nft_lookup.c 	priv->set->ops->walk(ctx, priv->set, &iter);
walk              310 net/netfilter/nft_set_bitmap.c 		.walk		= nft_bitmap_walk,
walk              684 net/netfilter/nft_set_hash.c 		.walk		= nft_rhash_walk,
walk              704 net/netfilter/nft_set_hash.c 		.walk		= nft_hash_walk,
walk              724 net/netfilter/nft_set_hash.c 		.walk		= nft_hash_walk,
walk              512 net/netfilter/nft_set_rbtree.c 		.walk		= nft_rbtree_walk,
walk              560 net/sched/act_api.c 	if (!act->act || !act->dump || !act->init || !act->walk || !act->lookup)
walk             1187 net/sched/act_api.c 	err = ops->walk(net, skb, &dcb, RTM_DELACTION, ops, extack);
walk             1518 net/sched/act_api.c 	ret = a_o->walk(net, skb, cb, RTM_GETACTION, a_o, NULL);
walk              416 net/sched/act_bpf.c 	.walk		=	tcf_bpf_walker,
walk              225 net/sched/act_connmark.c 	.walk		=	tcf_connmark_walker,
walk              707 net/sched/act_csum.c 	.walk		= tcf_csum_walker,
walk              935 net/sched/act_ct.c 	.walk		=	tcf_ct_walker,
walk              381 net/sched/act_ctinfo.c 	.walk	= tcf_ctinfo_walker,
walk              271 net/sched/act_gact.c 	.walk		=	tcf_gact_walker,
walk              883 net/sched/act_ife.c 	.walk = tcf_ife_walker,
walk              345 net/sched/act_ipt.c 	.walk		=	tcf_ipt_walker,
walk              394 net/sched/act_ipt.c 	.walk		=	tcf_xt_walker,
walk              450 net/sched/act_mirred.c 	.walk		=	tcf_mirred_walker,
walk              376 net/sched/act_mpls.c 	.walk		=	tcf_mpls_walker,
walk              321 net/sched/act_nat.c 	.walk		=	tcf_nat_walker,
walk              490 net/sched/act_pedit.c 	.walk		=	tcf_pedit_walker,
walk              383 net/sched/act_police.c 	.walk		=	tcf_police_walker,
walk              290 net/sched/act_sample.c 	.walk	  = tcf_sample_walker,
walk              226 net/sched/act_simple.c 	.walk		=	tcf_simp_walker,
walk              329 net/sched/act_skbedit.c 	.walk		=	tcf_skbedit_walker,
walk              281 net/sched/act_skbmod.c 	.walk		=	tcf_skbmod_walker,
walk              598 net/sched/act_tunnel_key.c 	.walk		=	tunnel_key_walker,
walk              340 net/sched/act_vlan.c 	.walk		=	tcf_vlan_walker,
walk             2483 net/sched/cls_api.c 		if (!tp->ops->walk)
walk             2495 net/sched/cls_api.c 		tp->ops->walk(tp, &arg.w, true);
walk              336 net/sched/cls_basic.c 	.walk		=	basic_walk,
walk              706 net/sched/cls_bpf.c 	.walk		=	cls_bpf_walk,
walk              206 net/sched/cls_cgroup.c 	.walk		=	cls_cgroup_walk,
walk              706 net/sched/cls_flow.c 	.walk		= flow_walk,
walk             2550 net/sched/cls_flower.c 	.walk		= fl_walk,
walk              443 net/sched/cls_fw.c 	.walk		=	fw_walk,
walk              418 net/sched/cls_matchall.c 	.walk		= mall_walk,
walk              665 net/sched/cls_route.c 	.walk		=	route4_walk,
walk              760 net/sched/cls_rsvp.h 	.walk		=	rsvp_walk,
walk              202 net/sched/cls_tcindex.c 	struct tcindex_filter __rcu **walk;
walk              213 net/sched/cls_tcindex.c 			walk = p->h + i;
walk              214 net/sched/cls_tcindex.c 			for (f = rtnl_dereference(*walk); f;
walk              215 net/sched/cls_tcindex.c 			     walk = &f->next, f = rtnl_dereference(*walk)) {
walk              223 net/sched/cls_tcindex.c 		rcu_assign_pointer(*walk, rtnl_dereference(f->next));
walk              713 net/sched/cls_tcindex.c 	.walk		=	tcindex_walk,
walk             1397 net/sched/cls_u32.c 	.walk		=	u32_walk,
walk              152 net/sched/sch_api.c 		if (!(cops->find && cops->walk && cops->leaf))
walk             1372 net/sched/sch_api.c 	q->ops->cl_ops->walk(q, &arg.w);
walk             1944 net/sched/sch_api.c 			tp->ops->walk(tp, &arg.w, true);
walk             1963 net/sched/sch_api.c 	q->ops->cl_ops->walk(q, &args.w);
walk             2149 net/sched/sch_api.c 	q->ops->cl_ops->walk(q, &arg.w);
walk              673 net/sched/sch_atm.c 	.walk		= atm_tc_walk,
walk             3053 net/sched/sch_cake.c 	.walk		=	cake_walk,
walk             1782 net/sched/sch_cbq.c 	.walk		=	cbq_walk,
walk              536 net/sched/sch_cbs.c 	.walk		=	cbs_walk,
walk              479 net/sched/sch_drr.c 	.walk		= drr_walk,
walk              486 net/sched/sch_dsmark.c 	.walk		=	dsmark_walk,
walk              692 net/sched/sch_fq_codel.c 	.walk		=	fq_codel_walk,
walk             1658 net/sched/sch_hfsc.c 	.walk		= hfsc_walk
walk             1566 net/sched/sch_htb.c 	.walk		=	htb_walk,
walk              120 net/sched/sch_ingress.c 	.walk		=	ingress_walk,
walk              253 net/sched/sch_ingress.c 	.walk		=	ingress_walk,
walk              279 net/sched/sch_mq.c 	.walk		= mq_walk,
walk              613 net/sched/sch_mqprio.c 	.walk		= mqprio_walk,
walk              384 net/sched/sch_multiq.c 	.walk		=	multiq_walk,
walk             1263 net/sched/sch_netem.c 	.walk		=	netem_walk,
walk              407 net/sched/sch_prio.c 	.walk		=	prio_walk,
walk             1499 net/sched/sch_qfq.c 	.walk		= qfq_walk,
walk              423 net/sched/sch_red.c 	.walk		=	red_walk,
walk              692 net/sched/sch_sfb.c 	.walk		=	sfb_walk,
walk              912 net/sched/sch_sfq.c 	.walk		=	sfq_walk,
walk              288 net/sched/sch_skbprio.c 	.walk		=	skbprio_walk,
walk             1920 net/sched/sch_taprio.c 	.walk		= taprio_walk,
walk              528 net/sched/sch_tbf.c 	.walk		=	tbf_walk,
walk               37 net/tls/tls_device_fallback.c static void chain_to_walk(struct scatterlist *sg, struct scatter_walk *walk)
walk               39 net/tls/tls_device_fallback.c 	struct scatterlist *src = walk->sg;
walk               40 net/tls/tls_device_fallback.c 	int diff = walk->offset - src->offset;
walk               43 net/tls/tls_device_fallback.c 		    src->length - diff, walk->offset);
walk              320 net/xfrm/xfrm_policy.c 	if (unlikely(xp->walk.dead))
walk              392 net/xfrm/xfrm_policy.c 		INIT_LIST_HEAD(&policy->walk.all);
walk              419 net/xfrm/xfrm_policy.c 	BUG_ON(!policy->walk.dead);
walk              435 net/xfrm/xfrm_policy.c 	policy->walk.dead = 1;
walk              835 net/xfrm/xfrm_policy.c 	list_for_each_entry_reverse(policy, &net->xfrm.policy_all, walk.all) {
walk             1240 net/xfrm/xfrm_policy.c 	list_for_each_entry(policy, &net->xfrm.policy_all, walk.all) {
walk             1245 net/xfrm/xfrm_policy.c 		if (policy->walk.dead || dir >= XFRM_POLICY_MAX)
walk             1311 net/xfrm/xfrm_policy.c 	list_for_each_entry_reverse(policy, &net->xfrm.policy_all, walk.all) {
walk             1312 net/xfrm/xfrm_policy.c 		if (policy->walk.dead)
walk             1751 net/xfrm/xfrm_policy.c 	list_for_each_entry(pol, &net->xfrm.policy_all, walk.all) {
walk             1752 net/xfrm/xfrm_policy.c 		if (pol->walk.dead ||
walk             1785 net/xfrm/xfrm_policy.c 	list_for_each_entry(pol, &net->xfrm.policy_all, walk.all) {
walk             1787 net/xfrm/xfrm_policy.c 		if (pol->walk.dead ||
walk             1810 net/xfrm/xfrm_policy.c int xfrm_policy_walk(struct net *net, struct xfrm_policy_walk *walk,
walk             1818 net/xfrm/xfrm_policy.c 	if (walk->type >= XFRM_POLICY_TYPE_MAX &&
walk             1819 net/xfrm/xfrm_policy.c 	    walk->type != XFRM_POLICY_TYPE_ANY)
walk             1822 net/xfrm/xfrm_policy.c 	if (list_empty(&walk->walk.all) && walk->seq != 0)
walk             1826 net/xfrm/xfrm_policy.c 	if (list_empty(&walk->walk.all))
walk             1829 net/xfrm/xfrm_policy.c 		x = list_first_entry(&walk->walk.all,
walk             1835 net/xfrm/xfrm_policy.c 		pol = container_of(x, struct xfrm_policy, walk);
walk             1836 net/xfrm/xfrm_policy.c 		if (walk->type != XFRM_POLICY_TYPE_ANY &&
walk             1837 net/xfrm/xfrm_policy.c 		    walk->type != pol->type)
walk             1840 net/xfrm/xfrm_policy.c 			     walk->seq, data);
walk             1842 net/xfrm/xfrm_policy.c 			list_move_tail(&walk->walk.all, &x->all);
walk             1845 net/xfrm/xfrm_policy.c 		walk->seq++;
walk             1847 net/xfrm/xfrm_policy.c 	if (walk->seq == 0) {
walk             1851 net/xfrm/xfrm_policy.c 	list_del_init(&walk->walk.all);
walk             1858 net/xfrm/xfrm_policy.c void xfrm_policy_walk_init(struct xfrm_policy_walk *walk, u8 type)
walk             1860 net/xfrm/xfrm_policy.c 	INIT_LIST_HEAD(&walk->walk.all);
walk             1861 net/xfrm/xfrm_policy.c 	walk->walk.dead = 1;
walk             1862 net/xfrm/xfrm_policy.c 	walk->type = type;
walk             1863 net/xfrm/xfrm_policy.c 	walk->seq = 0;
walk             1867 net/xfrm/xfrm_policy.c void xfrm_policy_walk_done(struct xfrm_policy_walk *walk, struct net *net)
walk             1869 net/xfrm/xfrm_policy.c 	if (list_empty(&walk->walk.all))
walk             1873 net/xfrm/xfrm_policy.c 	list_del(&walk->walk.all);
walk             2200 net/xfrm/xfrm_policy.c 	list_add(&pol->walk.all, &net->xfrm.policy_all);
walk             2210 net/xfrm/xfrm_policy.c 	if (list_empty(&pol->walk.all))
walk             2220 net/xfrm/xfrm_policy.c 	list_del_init(&pol->walk.all);
walk             4321 net/xfrm/xfrm_policy.c 	if (unlikely(pol->walk.dead)) {
walk             2036 net/xfrm/xfrm_state.c int xfrm_state_walk(struct net *net, struct xfrm_state_walk *walk,
walk             2044 net/xfrm/xfrm_state.c 	if (walk->seq != 0 && list_empty(&walk->all))
walk             2048 net/xfrm/xfrm_state.c 	if (list_empty(&walk->all))
walk             2051 net/xfrm/xfrm_state.c 		x = list_first_entry(&walk->all, struct xfrm_state_walk, all);
walk             2056 net/xfrm/xfrm_state.c 		if (!xfrm_id_proto_match(state->id.proto, walk->proto))
walk             2058 net/xfrm/xfrm_state.c 		if (!__xfrm_state_filter_match(state, walk->filter))
walk             2060 net/xfrm/xfrm_state.c 		err = func(state, walk->seq, data);
walk             2062 net/xfrm/xfrm_state.c 			list_move_tail(&walk->all, &x->all);
walk             2065 net/xfrm/xfrm_state.c 		walk->seq++;
walk             2067 net/xfrm/xfrm_state.c 	if (walk->seq == 0) {
walk             2071 net/xfrm/xfrm_state.c 	list_del_init(&walk->all);
walk             2078 net/xfrm/xfrm_state.c void xfrm_state_walk_init(struct xfrm_state_walk *walk, u8 proto,
walk             2081 net/xfrm/xfrm_state.c 	INIT_LIST_HEAD(&walk->all);
walk             2082 net/xfrm/xfrm_state.c 	walk->proto = proto;
walk             2083 net/xfrm/xfrm_state.c 	walk->state = XFRM_STATE_DEAD;
walk             2084 net/xfrm/xfrm_state.c 	walk->seq = 0;
walk             2085 net/xfrm/xfrm_state.c 	walk->filter = filter;
walk             2089 net/xfrm/xfrm_state.c void xfrm_state_walk_done(struct xfrm_state_walk *walk, struct net *net)
walk             2091 net/xfrm/xfrm_state.c 	kfree(walk->filter);
walk             2093 net/xfrm/xfrm_state.c 	if (list_empty(&walk->all))
walk             2097 net/xfrm/xfrm_state.c 	list_del(&walk->all);
walk             1000 net/xfrm/xfrm_user.c 	struct xfrm_state_walk *walk = (struct xfrm_state_walk *) &cb->args[1];
walk             1005 net/xfrm/xfrm_user.c 		xfrm_state_walk_done(walk, net);
walk             1013 net/xfrm/xfrm_user.c 	struct xfrm_state_walk *walk = (struct xfrm_state_walk *) &cb->args[1];
walk             1045 net/xfrm/xfrm_user.c 		xfrm_state_walk_init(walk, proto, filter);
walk             1049 net/xfrm/xfrm_user.c 	(void) xfrm_state_walk(net, walk, dump_one_state, &info);
walk             1640 net/xfrm/xfrm_user.c 	xp->walk.dead = 1;
walk             1796 net/xfrm/xfrm_user.c 	struct xfrm_policy_walk *walk = (struct xfrm_policy_walk *)cb->args;
walk             1799 net/xfrm/xfrm_user.c 	xfrm_policy_walk_done(walk, net);
walk             1805 net/xfrm/xfrm_user.c 	struct xfrm_policy_walk *walk = (struct xfrm_policy_walk *)cb->args;
walk             1807 net/xfrm/xfrm_user.c 	BUILD_BUG_ON(sizeof(*walk) > sizeof(cb->args));
walk             1809 net/xfrm/xfrm_user.c 	xfrm_policy_walk_init(walk, XFRM_POLICY_TYPE_ANY);
walk             1816 net/xfrm/xfrm_user.c 	struct xfrm_policy_walk *walk = (struct xfrm_policy_walk *)cb->args;
walk             1824 net/xfrm/xfrm_user.c 	(void) xfrm_policy_walk(net, walk, dump_one_policy, &info);
walk             2208 net/xfrm/xfrm_user.c 	if (unlikely(xp->walk.dead))
walk               88 security/device_cgroup.c 	struct dev_exception_item *excopy, *walk;
walk               96 security/device_cgroup.c 	list_for_each_entry(walk, &dev_cgroup->exceptions, list) {
walk               97 security/device_cgroup.c 		if (walk->type != ex->type)
walk               99 security/device_cgroup.c 		if (walk->major != ex->major)
walk              101 security/device_cgroup.c 		if (walk->minor != ex->minor)
walk              104 security/device_cgroup.c 		walk->access |= ex->access;
walk              120 security/device_cgroup.c 	struct dev_exception_item *walk, *tmp;
walk              124 security/device_cgroup.c 	list_for_each_entry_safe(walk, tmp, &dev_cgroup->exceptions, list) {
walk              125 security/device_cgroup.c 		if (walk->type != ex->type)
walk              127 security/device_cgroup.c 		if (walk->major != ex->major)
walk              129 security/device_cgroup.c 		if (walk->minor != ex->minor)
walk              132 security/device_cgroup.c 		walk->access &= ~ex->access;
walk              133 security/device_cgroup.c 		if (!walk->access) {
walk              134 security/device_cgroup.c 			list_del_rcu(&walk->list);
walk              135 security/device_cgroup.c 			kfree_rcu(walk, rcu);
walk               71 tools/testing/selftests/net/psock_tpacket.c 	void (*walk)(int sock, struct ring *ring);
walk              641 tools/testing/selftests/net/psock_tpacket.c 	ring->walk = walk_v1_v2;
walk              662 tools/testing/selftests/net/psock_tpacket.c 	ring->walk = walk_v3;
walk              748 tools/testing/selftests/net/psock_tpacket.c 	ring->walk(sock, ring);